diff --git a/.projen/deps.json b/.projen/deps.json
index 2e8a4c9c8..dba9b9007 100644
--- a/.projen/deps.json
+++ b/.projen/deps.json
@@ -17,12 +17,12 @@
},
{
"name": "cdktf-cli",
- "version": "^0.19.0",
+ "version": "^0.20.0",
"type": "build"
},
{
"name": "cdktf",
- "version": "^0.19.0",
+ "version": "^0.20.0",
"type": "build"
},
{
@@ -84,7 +84,7 @@
},
{
"name": "cdktf",
- "version": "^0.19.0",
+ "version": "^0.20.0",
"type": "peer"
},
{
diff --git a/.projenrc.js b/.projenrc.js
index 4b8d6ce14..9315b23b3 100644
--- a/.projenrc.js
+++ b/.projenrc.js
@@ -7,7 +7,7 @@ const { CdktfProviderProject } = require("@cdktf/provider-project");
const project = new CdktfProviderProject({
useCustomGithubRunner: false,
terraformProvider: "databricks/databricks@~> 1.0",
- cdktfVersion: "^0.19.0",
+ cdktfVersion: "^0.20.0",
constructsVersion: "^10.3.0",
minNodeVersion: "18.12.0",
jsiiVersion: "~5.2.0",
diff --git a/docs/accessControlRuleSet.csharp.md b/docs/accessControlRuleSet.csharp.md
index 021499925..02659a17a 100644
--- a/docs/accessControlRuleSet.csharp.md
+++ b/docs/accessControlRuleSet.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1002,6 +1009,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1009,6 +1017,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/accessControlRuleSet.go.md b/docs/accessControlRuleSet.go.md
index f012e8583..fcf9e8317 100644
--- a/docs/accessControlRuleSet.go.md
+++ b/docs/accessControlRuleSet.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.NewAccessControlRuleSet(scope Construct, id *string, config AccessControlRuleSetConfig) AccessControlRuleSet
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -398,7 +405,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.AccessControlRuleSet_IsConstruct(x interface{}) *bool
```
@@ -430,7 +437,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.AccessControlRuleSet_IsTerraformElement(x interface{}) *bool
```
@@ -444,7 +451,7 @@ accesscontrolruleset.AccessControlRuleSet_IsTerraformElement(x interface{}) *boo
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.AccessControlRuleSet_IsTerraformResource(x interface{}) *bool
```
@@ -458,7 +465,7 @@ accesscontrolruleset.AccessControlRuleSet_IsTerraformResource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.AccessControlRuleSet_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -764,7 +771,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
&accesscontrolruleset.AccessControlRuleSetConfig {
Connection: interface{},
@@ -913,7 +920,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
&accesscontrolruleset.AccessControlRuleSetGrantRules {
Role: *string,
@@ -961,7 +968,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.NewAccessControlRuleSetGrantRulesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) AccessControlRuleSetGrantRulesList
```
@@ -1002,6 +1009,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1009,6 +1017,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1104,7 +1128,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/accesscontrolruleset"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/accesscontrolruleset"
accesscontrolruleset.NewAccessControlRuleSetGrantRulesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) AccessControlRuleSetGrantRulesOutputReference
```
diff --git a/docs/accessControlRuleSet.java.md b/docs/accessControlRuleSet.java.md
index 61c8a572f..e9e76b443 100644
--- a/docs/accessControlRuleSet.java.md
+++ b/docs/accessControlRuleSet.java.md
@@ -144,6 +144,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -218,6 +219,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1099,6 +1106,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1106,6 +1114,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/accessControlRuleSet.python.md b/docs/accessControlRuleSet.python.md
index 7760f363c..073bf4170 100644
--- a/docs/accessControlRuleSet.python.md
+++ b/docs/accessControlRuleSet.python.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -220,6 +221,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1143,6 +1150,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1150,6 +1158,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/accessControlRuleSet.typescript.md b/docs/accessControlRuleSet.typescript.md
index 1df100f15..78e503cb7 100644
--- a/docs/accessControlRuleSet.typescript.md
+++ b/docs/accessControlRuleSet.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -988,6 +995,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -995,6 +1003,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/artifactAllowlist.csharp.md b/docs/artifactAllowlist.csharp.md
index 06fa677c2..9170ae460 100644
--- a/docs/artifactAllowlist.csharp.md
+++ b/docs/artifactAllowlist.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1113,6 +1120,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1120,6 +1128,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/artifactAllowlist.go.md b/docs/artifactAllowlist.go.md
index 77467c612..4ff48dfca 100644
--- a/docs/artifactAllowlist.go.md
+++ b/docs/artifactAllowlist.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.NewArtifactAllowlist(scope Construct, id *string, config ArtifactAllowlistConfig) ArtifactAllowlist
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -412,7 +419,7 @@ func ResetMetastoreId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.ArtifactAllowlist_IsConstruct(x interface{}) *bool
```
@@ -444,7 +451,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.ArtifactAllowlist_IsTerraformElement(x interface{}) *bool
```
@@ -458,7 +465,7 @@ artifactallowlist.ArtifactAllowlist_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.ArtifactAllowlist_IsTerraformResource(x interface{}) *bool
```
@@ -472,7 +479,7 @@ artifactallowlist.ArtifactAllowlist_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.ArtifactAllowlist_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -833,7 +840,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
&artifactallowlist.ArtifactAllowlistArtifactMatcher {
Artifact: *string,
@@ -879,7 +886,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
&artifactallowlist.ArtifactAllowlistConfig {
Connection: interface{},
@@ -1072,7 +1079,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.NewArtifactAllowlistArtifactMatcherList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ArtifactAllowlistArtifactMatcherList
```
@@ -1113,6 +1120,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1120,6 +1128,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1215,7 +1239,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/artifactallowlist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/artifactallowlist"
artifactallowlist.NewArtifactAllowlistArtifactMatcherOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ArtifactAllowlistArtifactMatcherOutputReference
```
diff --git a/docs/artifactAllowlist.java.md b/docs/artifactAllowlist.java.md
index 0d9dd74a8..1c672fe37 100644
--- a/docs/artifactAllowlist.java.md
+++ b/docs/artifactAllowlist.java.md
@@ -174,6 +174,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -250,6 +251,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1240,6 +1247,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1247,6 +1255,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/artifactAllowlist.python.md b/docs/artifactAllowlist.python.md
index d559a5a92..a79910188 100644
--- a/docs/artifactAllowlist.python.md
+++ b/docs/artifactAllowlist.python.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -252,6 +253,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1284,6 +1291,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1291,6 +1299,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/artifactAllowlist.typescript.md b/docs/artifactAllowlist.typescript.md
index a2bc5ff61..0a86d6ffd 100644
--- a/docs/artifactAllowlist.typescript.md
+++ b/docs/artifactAllowlist.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1096,6 +1103,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1103,6 +1111,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/awsS3Mount.csharp.md b/docs/awsS3Mount.csharp.md
index 21c157696..6e100fedb 100644
--- a/docs/awsS3Mount.csharp.md
+++ b/docs/awsS3Mount.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/awsS3Mount.go.md b/docs/awsS3Mount.go.md
index 7ddf3a081..dd1df1a9a 100644
--- a/docs/awsS3Mount.go.md
+++ b/docs/awsS3Mount.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
awss3mount.NewAwsS3Mount(scope Construct, id *string, config AwsS3MountConfig) AwsS3Mount
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetInstanceProfile()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
awss3mount.AwsS3Mount_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
awss3mount.AwsS3Mount_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ awss3mount.AwsS3Mount_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
awss3mount.AwsS3Mount_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ awss3mount.AwsS3Mount_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
awss3mount.AwsS3Mount_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -802,7 +809,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/awss3mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/awss3mount"
&awss3mount.AwsS3MountConfig {
Connection: interface{},
diff --git a/docs/awsS3Mount.java.md b/docs/awsS3Mount.java.md
index 8ca406697..505ebcd29 100644
--- a/docs/awsS3Mount.java.md
+++ b/docs/awsS3Mount.java.md
@@ -161,6 +161,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -235,6 +236,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/awsS3Mount.python.md b/docs/awsS3Mount.python.md
index 673761efa..53bf37c7a 100644
--- a/docs/awsS3Mount.python.md
+++ b/docs/awsS3Mount.python.md
@@ -159,6 +159,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -238,6 +239,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/awsS3Mount.typescript.md b/docs/awsS3Mount.typescript.md
index 21648e554..d794d0caf 100644
--- a/docs/awsS3Mount.typescript.md
+++ b/docs/awsS3Mount.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/azureAdlsGen1Mount.csharp.md b/docs/azureAdlsGen1Mount.csharp.md
index 722170e8f..3fb30ea12 100644
--- a/docs/azureAdlsGen1Mount.csharp.md
+++ b/docs/azureAdlsGen1Mount.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/azureAdlsGen1Mount.go.md b/docs/azureAdlsGen1Mount.go.md
index 4d14b5241..bb1f61204 100644
--- a/docs/azureAdlsGen1Mount.go.md
+++ b/docs/azureAdlsGen1Mount.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
azureadlsgen1mount.NewAzureAdlsGen1Mount(scope Construct, id *string, config AzureAdlsGen1MountConfig) AzureAdlsGen1Mount
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetSparkConfPrefix()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
azureadlsgen1mount.AzureAdlsGen1Mount_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
azureadlsgen1mount.AzureAdlsGen1Mount_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ azureadlsgen1mount.AzureAdlsGen1Mount_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
azureadlsgen1mount.AzureAdlsGen1Mount_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ azureadlsgen1mount.AzureAdlsGen1Mount_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
azureadlsgen1mount.AzureAdlsGen1Mount_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -919,7 +926,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen1mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen1mount"
&azureadlsgen1mount.AzureAdlsGen1MountConfig {
Connection: interface{},
diff --git a/docs/azureAdlsGen1Mount.java.md b/docs/azureAdlsGen1Mount.java.md
index f05413739..cf6ec7292 100644
--- a/docs/azureAdlsGen1Mount.java.md
+++ b/docs/azureAdlsGen1Mount.java.md
@@ -211,6 +211,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -286,6 +287,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/azureAdlsGen1Mount.python.md b/docs/azureAdlsGen1Mount.python.md
index a23c41e48..c13991a4e 100644
--- a/docs/azureAdlsGen1Mount.python.md
+++ b/docs/azureAdlsGen1Mount.python.md
@@ -209,6 +209,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -289,6 +290,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/azureAdlsGen1Mount.typescript.md b/docs/azureAdlsGen1Mount.typescript.md
index 28c81e480..9ed521edb 100644
--- a/docs/azureAdlsGen1Mount.typescript.md
+++ b/docs/azureAdlsGen1Mount.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/azureAdlsGen2Mount.csharp.md b/docs/azureAdlsGen2Mount.csharp.md
index e02317765..302da9349 100644
--- a/docs/azureAdlsGen2Mount.csharp.md
+++ b/docs/azureAdlsGen2Mount.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/azureAdlsGen2Mount.go.md b/docs/azureAdlsGen2Mount.go.md
index 897199fe4..5edc2373f 100644
--- a/docs/azureAdlsGen2Mount.go.md
+++ b/docs/azureAdlsGen2Mount.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
azureadlsgen2mount.NewAzureAdlsGen2Mount(scope Construct, id *string, config AzureAdlsGen2MountConfig) AzureAdlsGen2Mount
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
azureadlsgen2mount.AzureAdlsGen2Mount_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
azureadlsgen2mount.AzureAdlsGen2Mount_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ azureadlsgen2mount.AzureAdlsGen2Mount_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
azureadlsgen2mount.AzureAdlsGen2Mount_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ azureadlsgen2mount.AzureAdlsGen2Mount_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
azureadlsgen2mount.AzureAdlsGen2Mount_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -934,7 +941,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureadlsgen2mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureadlsgen2mount"
&azureadlsgen2mount.AzureAdlsGen2MountConfig {
Connection: interface{},
diff --git a/docs/azureAdlsGen2Mount.java.md b/docs/azureAdlsGen2Mount.java.md
index c8791572d..88a40630b 100644
--- a/docs/azureAdlsGen2Mount.java.md
+++ b/docs/azureAdlsGen2Mount.java.md
@@ -222,6 +222,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -296,6 +297,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/azureAdlsGen2Mount.python.md b/docs/azureAdlsGen2Mount.python.md
index 3760ae6aa..85ed99954 100644
--- a/docs/azureAdlsGen2Mount.python.md
+++ b/docs/azureAdlsGen2Mount.python.md
@@ -219,6 +219,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -298,6 +299,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/azureAdlsGen2Mount.typescript.md b/docs/azureAdlsGen2Mount.typescript.md
index 154dbbde0..a8dfa89d9 100644
--- a/docs/azureAdlsGen2Mount.typescript.md
+++ b/docs/azureAdlsGen2Mount.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/azureBlobMount.csharp.md b/docs/azureBlobMount.csharp.md
index eec7607c7..f6de5d250 100644
--- a/docs/azureBlobMount.csharp.md
+++ b/docs/azureBlobMount.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/azureBlobMount.go.md b/docs/azureBlobMount.go.md
index 42a079a12..196d66952 100644
--- a/docs/azureBlobMount.go.md
+++ b/docs/azureBlobMount.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
azureblobmount.NewAzureBlobMount(scope Construct, id *string, config AzureBlobMountConfig) AzureBlobMount
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
azureblobmount.AzureBlobMount_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
azureblobmount.AzureBlobMount_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ azureblobmount.AzureBlobMount_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
azureblobmount.AzureBlobMount_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ azureblobmount.AzureBlobMount_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
azureblobmount.AzureBlobMount_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -890,7 +897,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/azureblobmount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/azureblobmount"
&azureblobmount.AzureBlobMountConfig {
Connection: interface{},
diff --git a/docs/azureBlobMount.java.md b/docs/azureBlobMount.java.md
index f9543a94e..534da71d1 100644
--- a/docs/azureBlobMount.java.md
+++ b/docs/azureBlobMount.java.md
@@ -201,6 +201,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -275,6 +276,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/azureBlobMount.python.md b/docs/azureBlobMount.python.md
index e8812aec6..f6520774c 100644
--- a/docs/azureBlobMount.python.md
+++ b/docs/azureBlobMount.python.md
@@ -199,6 +199,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -278,6 +279,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/azureBlobMount.typescript.md b/docs/azureBlobMount.typescript.md
index cde275fc5..9017631ad 100644
--- a/docs/azureBlobMount.typescript.md
+++ b/docs/azureBlobMount.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/catalog.csharp.md b/docs/catalog.csharp.md
index 5e3fddeea..edcc1a0ae 100644
--- a/docs/catalog.csharp.md
+++ b/docs/catalog.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -137,6 +138,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/catalog.go.md b/docs/catalog.go.md
index dbd906405..02d0b86e0 100644
--- a/docs/catalog.go.md
+++ b/docs/catalog.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
catalog.NewCatalog(scope Construct, id *string, config CatalogConfig) Catalog
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -137,6 +138,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -455,7 +462,7 @@ func ResetStorageRoot()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
catalog.Catalog_IsConstruct(x interface{}) *bool
```
@@ -487,7 +494,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
catalog.Catalog_IsTerraformElement(x interface{}) *bool
```
@@ -501,7 +508,7 @@ catalog.Catalog_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
catalog.Catalog_IsTerraformResource(x interface{}) *bool
```
@@ -515,7 +522,7 @@ catalog.Catalog_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
catalog.Catalog_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1030,7 +1037,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalog"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalog"
&catalog.CatalogConfig {
Connection: interface{},
diff --git a/docs/catalog.java.md b/docs/catalog.java.md
index d76686427..a4c14e157 100644
--- a/docs/catalog.java.md
+++ b/docs/catalog.java.md
@@ -242,6 +242,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -325,6 +326,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/catalog.python.md b/docs/catalog.python.md
index b2e32b13a..ccc712625 100644
--- a/docs/catalog.python.md
+++ b/docs/catalog.python.md
@@ -239,6 +239,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -327,6 +328,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/catalog.typescript.md b/docs/catalog.typescript.md
index b5c86b51d..29e0a4bfa 100644
--- a/docs/catalog.typescript.md
+++ b/docs/catalog.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -137,6 +138,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/catalogWorkspaceBinding.csharp.md b/docs/catalogWorkspaceBinding.csharp.md
index 1629dd383..c94210b01 100644
--- a/docs/catalogWorkspaceBinding.csharp.md
+++ b/docs/catalogWorkspaceBinding.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/catalogWorkspaceBinding.go.md b/docs/catalogWorkspaceBinding.go.md
index 8791c94c4..0d6c13e02 100644
--- a/docs/catalogWorkspaceBinding.go.md
+++ b/docs/catalogWorkspaceBinding.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
catalogworkspacebinding.NewCatalogWorkspaceBinding(scope Construct, id *string, config CatalogWorkspaceBindingConfig) CatalogWorkspaceBinding
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -413,7 +420,7 @@ func ResetWorkspaceId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
catalogworkspacebinding.CatalogWorkspaceBinding_IsConstruct(x interface{}) *bool
```
@@ -445,7 +452,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
catalogworkspacebinding.CatalogWorkspaceBinding_IsTerraformElement(x interface{}) *bool
```
@@ -459,7 +466,7 @@ catalogworkspacebinding.CatalogWorkspaceBinding_IsTerraformElement(x interface{}
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
catalogworkspacebinding.CatalogWorkspaceBinding_IsTerraformResource(x interface{}) *bool
```
@@ -473,7 +480,7 @@ catalogworkspacebinding.CatalogWorkspaceBinding_IsTerraformResource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
catalogworkspacebinding.CatalogWorkspaceBinding_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -834,7 +841,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/catalogworkspacebinding"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/catalogworkspacebinding"
&catalogworkspacebinding.CatalogWorkspaceBindingConfig {
Connection: interface{},
diff --git a/docs/catalogWorkspaceBinding.java.md b/docs/catalogWorkspaceBinding.java.md
index a2245c034..1014066c4 100644
--- a/docs/catalogWorkspaceBinding.java.md
+++ b/docs/catalogWorkspaceBinding.java.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -248,6 +249,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/catalogWorkspaceBinding.python.md b/docs/catalogWorkspaceBinding.python.md
index 7483570d8..cc3aab220 100644
--- a/docs/catalogWorkspaceBinding.python.md
+++ b/docs/catalogWorkspaceBinding.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -251,6 +252,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/catalogWorkspaceBinding.typescript.md b/docs/catalogWorkspaceBinding.typescript.md
index 528e0fd1b..2260535d7 100644
--- a/docs/catalogWorkspaceBinding.typescript.md
+++ b/docs/catalogWorkspaceBinding.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/cluster.csharp.md b/docs/cluster.csharp.md
index 588173ed6..36a8ff6c8 100644
--- a/docs/cluster.csharp.md
+++ b/docs/cluster.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -169,6 +170,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -6210,6 +6217,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6217,6 +6225,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -9140,6 +9164,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -9147,6 +9172,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -11145,6 +11186,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11152,6 +11194,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/cluster.go.md b/docs/cluster.go.md
index 295c09ae3..87159024f 100644
--- a/docs/cluster.go.md
+++ b/docs/cluster.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewCluster(scope Construct, id *string, config ClusterConfig) Cluster
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -169,6 +170,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -745,7 +752,7 @@ func ResetWorkloadType()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.Cluster_IsConstruct(x interface{}) *bool
```
@@ -777,7 +784,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.Cluster_IsTerraformElement(x interface{}) *bool
```
@@ -791,7 +798,7 @@ cluster.Cluster_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.Cluster_IsTerraformResource(x interface{}) *bool
```
@@ -805,7 +812,7 @@ cluster.Cluster_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.Cluster_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1815,7 +1822,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterAutoscale {
MaxWorkers: *f64,
@@ -1861,7 +1868,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterAwsAttributes {
Availability: *string,
@@ -1991,7 +1998,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterAzureAttributes {
Availability: *string,
@@ -2051,11 +2058,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterClusterLogConfS3,
}
```
@@ -2101,7 +2108,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterClusterLogConfDbfs {
Destination: *string,
@@ -2133,7 +2140,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterClusterLogConfS3 {
Destination: *string,
@@ -2249,11 +2256,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -2311,7 +2318,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -2357,7 +2364,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterConfig {
Connection: interface{},
@@ -2369,22 +2376,22 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
Provisioners: *[]interface{},
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterGcpAttributes,
Id: *string,
IdempotencyToken: *string,
InitScripts: interface{},
@@ -2399,8 +2406,8 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterTimeouts,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterWorkloadType,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterTimeouts,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterWorkloadType,
}
```
@@ -2960,11 +2967,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterDockerImageBasicAuth,
}
```
@@ -3008,7 +3015,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterDockerImageBasicAuth {
Password: *string,
@@ -3054,7 +3061,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterGcpAttributes {
Availability: *string,
@@ -3156,16 +3163,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterInitScriptsWorkspace,
}
```
@@ -3286,7 +3293,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsAbfss {
Destination: *string,
@@ -3318,7 +3325,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsDbfs {
Destination: *string,
@@ -3350,7 +3357,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsFile {
Destination: *string,
@@ -3382,7 +3389,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsGcs {
Destination: *string,
@@ -3414,7 +3421,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsS3 {
Destination: *string,
@@ -3530,7 +3537,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsVolumes {
Destination: *string,
@@ -3562,7 +3569,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterInitScriptsWorkspace {
Destination: *string,
@@ -3594,14 +3601,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterLibrary {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterLibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterLibraryCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterLibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterLibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterLibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterLibraryPypi,
Whl: *string,
}
```
@@ -3702,7 +3709,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterLibraryCran {
Package: *string,
@@ -3748,7 +3755,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterLibraryMaven {
Coordinates: *string,
@@ -3808,7 +3815,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterLibraryPypi {
Package: *string,
@@ -3854,7 +3861,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterTimeouts {
Create: *string,
@@ -3914,10 +3921,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.cluster.ClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.cluster.ClusterWorkloadTypeClients,
}
```
@@ -3948,7 +3955,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
&cluster.ClusterWorkloadTypeClients {
Jobs: interface{},
@@ -3996,7 +4003,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterAutoscaleOutputReference
```
@@ -4303,7 +4310,7 @@ func InternalValue() ClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterAwsAttributesOutputReference
```
@@ -4784,7 +4791,7 @@ func InternalValue() ClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterAzureAttributesOutputReference
```
@@ -5120,7 +5127,7 @@ func InternalValue() ClusterAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterClusterLogConfDbfsOutputReference
```
@@ -5391,7 +5398,7 @@ func InternalValue() ClusterClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterClusterLogConfOutputReference
```
@@ -5724,7 +5731,7 @@ func InternalValue() ClusterClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterClusterLogConfS3OutputReference
```
@@ -6169,7 +6176,7 @@ func InternalValue() ClusterClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ClusterClusterMountInfoList
```
@@ -6210,6 +6217,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6217,6 +6225,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -6312,7 +6336,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -6612,7 +6636,7 @@ func InternalValue() ClusterClusterMountInfoNetworkFilesystemInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ClusterClusterMountInfoOutputReference
```
@@ -6965,7 +6989,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterDockerImageBasicAuthOutputReference
```
@@ -7258,7 +7282,7 @@ func InternalValue() ClusterDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterDockerImageOutputReference
```
@@ -7571,7 +7595,7 @@ func InternalValue() ClusterDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterGcpAttributesOutputReference
```
@@ -7994,7 +8018,7 @@ func InternalValue() ClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsAbfssOutputReference
```
@@ -8272,7 +8296,7 @@ func InternalValue() ClusterInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsDbfsOutputReference
```
@@ -8543,7 +8567,7 @@ func InternalValue() ClusterInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsFileOutputReference
```
@@ -8821,7 +8845,7 @@ func InternalValue() ClusterInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsGcsOutputReference
```
@@ -9099,7 +9123,7 @@ func InternalValue() ClusterInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ClusterInitScriptsList
```
@@ -9140,6 +9164,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -9147,6 +9172,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -9242,7 +9283,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ClusterInitScriptsOutputReference
```
@@ -9803,7 +9844,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsS3OutputReference
```
@@ -10248,7 +10289,7 @@ func InternalValue() ClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsVolumesOutputReference
```
@@ -10526,7 +10567,7 @@ func InternalValue() ClusterInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterInitScriptsWorkspaceOutputReference
```
@@ -10804,7 +10845,7 @@ func InternalValue() ClusterInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterLibraryCranOutputReference
```
@@ -11104,7 +11145,7 @@ func InternalValue() ClusterLibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ClusterLibraryList
```
@@ -11145,6 +11186,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11152,6 +11194,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -11247,7 +11305,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterLibraryMavenOutputReference
```
@@ -11576,7 +11634,7 @@ func InternalValue() ClusterLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ClusterLibraryOutputReference
```
@@ -12056,7 +12114,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterLibraryPypiOutputReference
```
@@ -12356,7 +12414,7 @@ func InternalValue() ClusterLibraryPypi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterTimeoutsOutputReference
```
@@ -12692,7 +12750,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterWorkloadTypeClientsOutputReference
```
@@ -12999,7 +13057,7 @@ func InternalValue() ClusterWorkloadTypeClients
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/cluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/cluster"
cluster.NewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterWorkloadTypeOutputReference
```
diff --git a/docs/cluster.java.md b/docs/cluster.java.md
index 833e0487c..2d08647cf 100644
--- a/docs/cluster.java.md
+++ b/docs/cluster.java.md
@@ -480,6 +480,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -595,6 +596,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -6654,6 +6661,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6661,6 +6669,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -9584,6 +9608,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -9591,6 +9616,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -11589,6 +11630,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11596,6 +11638,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/cluster.python.md b/docs/cluster.python.md
index 7bdc2b8dc..0cb176c4e 100644
--- a/docs/cluster.python.md
+++ b/docs/cluster.python.md
@@ -471,6 +471,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -591,6 +592,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -7108,6 +7115,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -7115,6 +7123,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -10299,6 +10325,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -10306,6 +10333,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -12521,6 +12566,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -12528,6 +12574,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/cluster.typescript.md b/docs/cluster.typescript.md
index 6ce45ffe5..882970825 100644
--- a/docs/cluster.typescript.md
+++ b/docs/cluster.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -169,6 +170,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -6065,6 +6072,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6072,6 +6080,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -8995,6 +9019,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -9002,6 +9027,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -11000,6 +11041,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11007,6 +11049,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/clusterPolicy.csharp.md b/docs/clusterPolicy.csharp.md
index 922b707c0..613c2c685 100644
--- a/docs/clusterPolicy.csharp.md
+++ b/docs/clusterPolicy.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1731,6 +1738,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1738,6 +1746,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/clusterPolicy.go.md b/docs/clusterPolicy.go.md
index b28db2854..36e0e7a35 100644
--- a/docs/clusterPolicy.go.md
+++ b/docs/clusterPolicy.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicy(scope Construct, id *string, config ClusterPolicyConfig) ClusterPolicy
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -433,7 +440,7 @@ func ResetPolicyFamilyId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.ClusterPolicy_IsConstruct(x interface{}) *bool
```
@@ -465,7 +472,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.ClusterPolicy_IsTerraformElement(x interface{}) *bool
```
@@ -479,7 +486,7 @@ clusterpolicy.ClusterPolicy_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.ClusterPolicy_IsTerraformResource(x interface{}) *bool
```
@@ -493,7 +500,7 @@ clusterpolicy.ClusterPolicy_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.ClusterPolicy_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -909,7 +916,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
&clusterpolicy.ClusterPolicyConfig {
Connection: interface{},
@@ -1128,14 +1135,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
&clusterpolicy.ClusterPolicyLibraries {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.clusterPolicy.ClusterPolicyLibrariesCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.clusterPolicy.ClusterPolicyLibrariesCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.clusterPolicy.ClusterPolicyLibrariesMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.clusterPolicy.ClusterPolicyLibrariesPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.clusterPolicy.ClusterPolicyLibrariesMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.clusterPolicy.ClusterPolicyLibrariesPypi,
Whl: *string,
}
```
@@ -1236,7 +1243,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
&clusterpolicy.ClusterPolicyLibrariesCran {
Package: *string,
@@ -1282,7 +1289,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
&clusterpolicy.ClusterPolicyLibrariesMaven {
Coordinates: *string,
@@ -1342,7 +1349,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
&clusterpolicy.ClusterPolicyLibrariesPypi {
Package: *string,
@@ -1390,7 +1397,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicyLibrariesCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterPolicyLibrariesCranOutputReference
```
@@ -1690,7 +1697,7 @@ func InternalValue() ClusterPolicyLibrariesCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicyLibrariesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ClusterPolicyLibrariesList
```
@@ -1731,6 +1738,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1738,6 +1746,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1833,7 +1857,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicyLibrariesMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterPolicyLibrariesMavenOutputReference
```
@@ -2162,7 +2186,7 @@ func InternalValue() ClusterPolicyLibrariesMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicyLibrariesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ClusterPolicyLibrariesOutputReference
```
@@ -2642,7 +2666,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/clusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/clusterpolicy"
clusterpolicy.NewClusterPolicyLibrariesPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ClusterPolicyLibrariesPypiOutputReference
```
diff --git a/docs/clusterPolicy.java.md b/docs/clusterPolicy.java.md
index ff592b01d..3c997aeb1 100644
--- a/docs/clusterPolicy.java.md
+++ b/docs/clusterPolicy.java.md
@@ -194,6 +194,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -273,6 +274,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1878,6 +1885,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1885,6 +1893,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/clusterPolicy.python.md b/docs/clusterPolicy.python.md
index 2ca2396b5..0fc7f0291 100644
--- a/docs/clusterPolicy.python.md
+++ b/docs/clusterPolicy.python.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -275,6 +276,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1947,6 +1954,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1954,6 +1962,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/clusterPolicy.typescript.md b/docs/clusterPolicy.typescript.md
index ca6855256..c5660ef3a 100644
--- a/docs/clusterPolicy.typescript.md
+++ b/docs/clusterPolicy.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1698,6 +1705,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1705,6 +1713,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/connection.csharp.md b/docs/connection.csharp.md
index 350516e86..23fbdfdb8 100644
--- a/docs/connection.csharp.md
+++ b/docs/connection.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/connection.go.md b/docs/connection.go.md
index 2594d7c6d..f6bfec9a5 100644
--- a/docs/connection.go.md
+++ b/docs/connection.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
connection.NewConnection(scope Construct, id *string, config ConnectionConfig) Connection
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -413,7 +420,7 @@ func ResetReadOnly()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
connection.Connection_IsConstruct(x interface{}) *bool
```
@@ -445,7 +452,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
connection.Connection_IsTerraformElement(x interface{}) *bool
```
@@ -459,7 +466,7 @@ connection.Connection_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
connection.Connection_IsTerraformResource(x interface{}) *bool
```
@@ -473,7 +480,7 @@ connection.Connection_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
connection.Connection_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -900,7 +907,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/connection"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/connection"
&connection.ConnectionConfig {
Connection: interface{},
diff --git a/docs/connection.java.md b/docs/connection.java.md
index 9a677608f..00115a9aa 100644
--- a/docs/connection.java.md
+++ b/docs/connection.java.md
@@ -202,6 +202,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -279,6 +280,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/connection.python.md b/docs/connection.python.md
index ae23f5f14..14693a9b9 100644
--- a/docs/connection.python.md
+++ b/docs/connection.python.md
@@ -199,6 +199,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -281,6 +282,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/connection.typescript.md b/docs/connection.typescript.md
index ec90c25e1..d9b6577ad 100644
--- a/docs/connection.typescript.md
+++ b/docs/connection.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksAwsAssumeRolePolicy.csharp.md b/docs/dataDatabricksAwsAssumeRolePolicy.csharp.md
index c2a1116f0..9ee9ea3cd 100644
--- a/docs/dataDatabricksAwsAssumeRolePolicy.csharp.md
+++ b/docs/dataDatabricksAwsAssumeRolePolicy.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksAwsAssumeRolePolicy.go.md b/docs/dataDatabricksAwsAssumeRolePolicy.go.md
index d1582f599..3c8fdc504 100644
--- a/docs/dataDatabricksAwsAssumeRolePolicy.go.md
+++ b/docs/dataDatabricksAwsAssumeRolePolicy.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
datadatabricksawsassumerolepolicy.NewDataDatabricksAwsAssumeRolePolicy(scope Construct, id *string, config DataDatabricksAwsAssumeRolePolicyConfig) DataDatabricksAwsAssumeRolePolicy
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -288,7 +297,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_IsConstruct(x interface{}) *bool
```
@@ -320,7 +329,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_IsTerraformElement(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_IsTerraformE
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_IsTerraformDataSource(x interface{}) *bool
```
@@ -348,7 +357,7 @@ datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_IsTerraformD
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicy_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -654,7 +663,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsassumerolepolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsassumerolepolicy"
&datadatabricksawsassumerolepolicy.DataDatabricksAwsAssumeRolePolicyConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksAwsAssumeRolePolicy.java.md b/docs/dataDatabricksAwsAssumeRolePolicy.java.md
index cea4ea008..57b7c3086 100644
--- a/docs/dataDatabricksAwsAssumeRolePolicy.java.md
+++ b/docs/dataDatabricksAwsAssumeRolePolicy.java.md
@@ -154,6 +154,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -222,6 +223,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksAwsAssumeRolePolicy.python.md b/docs/dataDatabricksAwsAssumeRolePolicy.python.md
index 8d4f196ad..a68f1d9f0 100644
--- a/docs/dataDatabricksAwsAssumeRolePolicy.python.md
+++ b/docs/dataDatabricksAwsAssumeRolePolicy.python.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -224,6 +225,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksAwsAssumeRolePolicy.typescript.md b/docs/dataDatabricksAwsAssumeRolePolicy.typescript.md
index d7c55ba90..70132fede 100644
--- a/docs/dataDatabricksAwsAssumeRolePolicy.typescript.md
+++ b/docs/dataDatabricksAwsAssumeRolePolicy.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksAwsBucketPolicy.csharp.md b/docs/dataDatabricksAwsBucketPolicy.csharp.md
index ac619098f..59a3395a3 100644
--- a/docs/dataDatabricksAwsBucketPolicy.csharp.md
+++ b/docs/dataDatabricksAwsBucketPolicy.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksAwsBucketPolicy.go.md b/docs/dataDatabricksAwsBucketPolicy.go.md
index c47cc1ac8..215ebb9c2 100644
--- a/docs/dataDatabricksAwsBucketPolicy.go.md
+++ b/docs/dataDatabricksAwsBucketPolicy.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
datadatabricksawsbucketpolicy.NewDataDatabricksAwsBucketPolicy(scope Construct, id *string, config DataDatabricksAwsBucketPolicyConfig) DataDatabricksAwsBucketPolicy
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -295,7 +304,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_IsConstruct(x interface{}) *bool
```
@@ -327,7 +336,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_IsTerraformElement(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_IsTerraformElement(x
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_IsTerraformDataSource(x interface{}) *bool
```
@@ -355,7 +364,7 @@ datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_IsTerraformDataSourc
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicy_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -683,7 +692,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawsbucketpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawsbucketpolicy"
&datadatabricksawsbucketpolicy.DataDatabricksAwsBucketPolicyConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksAwsBucketPolicy.java.md b/docs/dataDatabricksAwsBucketPolicy.java.md
index 8dd2771ec..772d2425f 100644
--- a/docs/dataDatabricksAwsBucketPolicy.java.md
+++ b/docs/dataDatabricksAwsBucketPolicy.java.md
@@ -161,6 +161,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -230,6 +231,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksAwsBucketPolicy.python.md b/docs/dataDatabricksAwsBucketPolicy.python.md
index e53714ee5..5249465b1 100644
--- a/docs/dataDatabricksAwsBucketPolicy.python.md
+++ b/docs/dataDatabricksAwsBucketPolicy.python.md
@@ -159,6 +159,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -233,6 +234,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksAwsBucketPolicy.typescript.md b/docs/dataDatabricksAwsBucketPolicy.typescript.md
index b2dbf52ee..eefc5e95c 100644
--- a/docs/dataDatabricksAwsBucketPolicy.typescript.md
+++ b/docs/dataDatabricksAwsBucketPolicy.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksAwsCrossaccountPolicy.csharp.md b/docs/dataDatabricksAwsCrossaccountPolicy.csharp.md
index 34de6932d..ce1881530 100644
--- a/docs/dataDatabricksAwsCrossaccountPolicy.csharp.md
+++ b/docs/dataDatabricksAwsCrossaccountPolicy.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksAwsCrossaccountPolicy.go.md b/docs/dataDatabricksAwsCrossaccountPolicy.go.md
index 86251349c..890d13846 100644
--- a/docs/dataDatabricksAwsCrossaccountPolicy.go.md
+++ b/docs/dataDatabricksAwsCrossaccountPolicy.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
datadatabricksawscrossaccountpolicy.NewDataDatabricksAwsCrossaccountPolicy(scope Construct, id *string, config DataDatabricksAwsCrossaccountPolicyConfig) DataDatabricksAwsCrossaccountPolicy
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetPassRoles()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_IsTerraf
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_IsTerraf
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicy_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -603,7 +612,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksawscrossaccountpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksawscrossaccountpolicy"
&datadatabricksawscrossaccountpolicy.DataDatabricksAwsCrossaccountPolicyConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksAwsCrossaccountPolicy.java.md b/docs/dataDatabricksAwsCrossaccountPolicy.java.md
index 176c844d4..b4d9e544e 100644
--- a/docs/dataDatabricksAwsCrossaccountPolicy.java.md
+++ b/docs/dataDatabricksAwsCrossaccountPolicy.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksAwsCrossaccountPolicy.python.md b/docs/dataDatabricksAwsCrossaccountPolicy.python.md
index ea8e4a471..f572728b2 100644
--- a/docs/dataDatabricksAwsCrossaccountPolicy.python.md
+++ b/docs/dataDatabricksAwsCrossaccountPolicy.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksAwsCrossaccountPolicy.typescript.md b/docs/dataDatabricksAwsCrossaccountPolicy.typescript.md
index bd97cc81a..30acc9a55 100644
--- a/docs/dataDatabricksAwsCrossaccountPolicy.typescript.md
+++ b/docs/dataDatabricksAwsCrossaccountPolicy.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksCatalogs.csharp.md b/docs/dataDatabricksCatalogs.csharp.md
index 10bfe7c5b..52195b652 100644
--- a/docs/dataDatabricksCatalogs.csharp.md
+++ b/docs/dataDatabricksCatalogs.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksCatalogs.go.md b/docs/dataDatabricksCatalogs.go.md
index 8ec0c7e0d..d5588ce5d 100644
--- a/docs/dataDatabricksCatalogs.go.md
+++ b/docs/dataDatabricksCatalogs.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
datadatabrickscatalogs.NewDataDatabricksCatalogs(scope Construct, id *string, config DataDatabricksCatalogsConfig) DataDatabricksCatalogs
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
datadatabrickscatalogs.DataDatabricksCatalogs_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
datadatabrickscatalogs.DataDatabricksCatalogs_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabrickscatalogs.DataDatabricksCatalogs_IsTerraformElement(x interface{})
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
datadatabrickscatalogs.DataDatabricksCatalogs_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabrickscatalogs.DataDatabricksCatalogs_IsTerraformDataSource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
datadatabrickscatalogs.DataDatabricksCatalogs_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscatalogs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscatalogs"
&datadatabrickscatalogs.DataDatabricksCatalogsConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksCatalogs.java.md b/docs/dataDatabricksCatalogs.java.md
index a7d64eeee..b173845b8 100644
--- a/docs/dataDatabricksCatalogs.java.md
+++ b/docs/dataDatabricksCatalogs.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksCatalogs.python.md b/docs/dataDatabricksCatalogs.python.md
index e32807231..753b84fb5 100644
--- a/docs/dataDatabricksCatalogs.python.md
+++ b/docs/dataDatabricksCatalogs.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksCatalogs.typescript.md b/docs/dataDatabricksCatalogs.typescript.md
index d467f75af..cafbf221d 100644
--- a/docs/dataDatabricksCatalogs.typescript.md
+++ b/docs/dataDatabricksCatalogs.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksCluster.csharp.md b/docs/dataDatabricksCluster.csharp.md
index 44acbf305..692f04b94 100644
--- a/docs/dataDatabricksCluster.csharp.md
+++ b/docs/dataDatabricksCluster.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -124,6 +125,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -6814,6 +6823,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6821,6 +6831,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -9246,6 +9272,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -9253,6 +9280,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksCluster.go.md b/docs/dataDatabricksCluster.go.md
index eebe96d6e..2d6601e93 100644
--- a/docs/dataDatabricksCluster.go.md
+++ b/docs/dataDatabricksCluster.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksCluster(scope Construct, id *string, config DataDatabricksClusterConfig) DataDatabricksCluster
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -124,6 +125,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -308,7 +317,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.DataDatabricksCluster_IsConstruct(x interface{}) *bool
```
@@ -340,7 +349,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.DataDatabricksCluster_IsTerraformElement(x interface{}) *bool
```
@@ -354,7 +363,7 @@ datadatabrickscluster.DataDatabricksCluster_IsTerraformElement(x interface{}) *b
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.DataDatabricksCluster_IsTerraformDataSource(x interface{}) *bool
```
@@ -368,7 +377,7 @@ datadatabrickscluster.DataDatabricksCluster_IsTerraformDataSource(x interface{})
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.DataDatabricksCluster_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -663,34 +672,34 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfo {
DefaultTags: *map[string]*string,
SparkVersion: *string,
State: *string,
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoAzureAttributes,
ClusterCores: *f64,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConf,
- ClusterLogStatus: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogStatus,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConf,
+ ClusterLogStatus: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogStatus,
ClusterMemoryMb: *f64,
ClusterName: *string,
ClusterSource: *string,
CreatorUserName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoDockerImage,
- Driver: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoDriver,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoDockerImage,
+ Driver: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoDriver,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
Executors: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoGcpAttributes,
InitScripts: interface{},
InstancePoolId: *string,
JdbcPort: *f64,
@@ -708,7 +717,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
StartTime: *f64,
StateMessage: *string,
TerminateTime: *f64,
- TerminationReason: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoTerminationReason,
+ TerminationReason: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoTerminationReason,
}
```
@@ -1305,7 +1314,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoAutoscale {
MaxWorkers: *f64,
@@ -1351,7 +1360,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoAwsAttributes {
Availability: *string,
@@ -1481,7 +1490,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoAzureAttributes {
Availability: *string,
@@ -1541,11 +1550,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoClusterLogConfS3,
}
```
@@ -1591,7 +1600,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoClusterLogConfDbfs {
Destination: *string,
@@ -1623,7 +1632,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoClusterLogConfS3 {
Destination: *string,
@@ -1739,7 +1748,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoClusterLogStatus {
LastAttempted: *f64,
@@ -1785,11 +1794,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoDockerImageBasicAuth,
}
```
@@ -1833,7 +1842,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoDockerImageBasicAuth {
Password: *string,
@@ -1879,12 +1888,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoDriver {
HostPrivateIp: *string,
InstanceId: *string,
- NodeAwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoDriverNodeAwsAttributes,
+ NodeAwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoDriverNodeAwsAttributes,
NodeId: *string,
PrivateIp: *string,
PublicDns: *string,
@@ -1997,7 +2006,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoDriverNodeAwsAttributes {
IsSpot: interface{},
@@ -2029,12 +2038,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoExecutors {
HostPrivateIp: *string,
InstanceId: *string,
- NodeAwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributes,
+ NodeAwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributes,
NodeId: *string,
PrivateIp: *string,
PublicDns: *string,
@@ -2147,7 +2156,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributes {
IsSpot: interface{},
@@ -2179,7 +2188,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoGcpAttributes {
Availability: *string,
@@ -2281,16 +2290,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfoInitScriptsWorkspace,
}
```
@@ -2411,7 +2420,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsAbfss {
Destination: *string,
@@ -2443,7 +2452,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsDbfs {
Destination: *string,
@@ -2475,7 +2484,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsFile {
Destination: *string,
@@ -2507,7 +2516,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsGcs {
Destination: *string,
@@ -2539,7 +2548,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsS3 {
Destination: *string,
@@ -2655,7 +2664,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsVolumes {
Destination: *string,
@@ -2687,7 +2696,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoInitScriptsWorkspace {
Destination: *string,
@@ -2719,7 +2728,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterClusterInfoTerminationReason {
Code: *string,
@@ -2779,7 +2788,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
&datadatabrickscluster.DataDatabricksClusterConfig {
Connection: interface{},
@@ -2790,7 +2799,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
ClusterId: *string,
- ClusterInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksCluster.DataDatabricksClusterClusterInfo,
+ ClusterInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksCluster.DataDatabricksClusterClusterInfo,
ClusterName: *string,
Id: *string,
}
@@ -2944,7 +2953,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoAutoscaleOutputReference
```
@@ -3251,7 +3260,7 @@ func InternalValue() DataDatabricksClusterClusterInfoAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoAwsAttributesOutputReference
```
@@ -3732,7 +3741,7 @@ func InternalValue() DataDatabricksClusterClusterInfoAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoAzureAttributesOutputReference
```
@@ -4068,7 +4077,7 @@ func InternalValue() DataDatabricksClusterClusterInfoAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoClusterLogConfDbfsOutputReference
```
@@ -4339,7 +4348,7 @@ func InternalValue() DataDatabricksClusterClusterInfoClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoClusterLogConfOutputReference
```
@@ -4672,7 +4681,7 @@ func InternalValue() DataDatabricksClusterClusterInfoClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoClusterLogConfS3OutputReference
```
@@ -5117,7 +5126,7 @@ func InternalValue() DataDatabricksClusterClusterInfoClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoClusterLogStatusOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoClusterLogStatusOutputReference
```
@@ -5424,7 +5433,7 @@ func InternalValue() DataDatabricksClusterClusterInfoClusterLogStatus
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoDockerImageBasicAuthOutputReference
```
@@ -5717,7 +5726,7 @@ func InternalValue() DataDatabricksClusterClusterInfoDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoDockerImageOutputReference
```
@@ -6030,7 +6039,7 @@ func InternalValue() DataDatabricksClusterClusterInfoDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoDriverNodeAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoDriverNodeAwsAttributesOutputReference
```
@@ -6308,7 +6317,7 @@ func InternalValue() DataDatabricksClusterClusterInfoDriverNodeAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoDriverOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoDriverOutputReference
```
@@ -6773,7 +6782,7 @@ func InternalValue() DataDatabricksClusterClusterInfoDriver
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoExecutorsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksClusterClusterInfoExecutorsList
```
@@ -6814,6 +6823,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6821,6 +6831,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -6916,7 +6942,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesOutputReference
```
@@ -7194,7 +7220,7 @@ func InternalValue() DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoExecutorsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksClusterClusterInfoExecutorsOutputReference
```
@@ -7677,7 +7703,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoGcpAttributesOutputReference
```
@@ -8100,7 +8126,7 @@ func InternalValue() DataDatabricksClusterClusterInfoGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsAbfssOutputReference
```
@@ -8378,7 +8404,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsDbfsOutputReference
```
@@ -8649,7 +8675,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsFileOutputReference
```
@@ -8927,7 +8953,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsGcsOutputReference
```
@@ -9205,7 +9231,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksClusterClusterInfoInitScriptsList
```
@@ -9246,6 +9272,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -9253,6 +9280,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -9348,7 +9391,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksClusterClusterInfoInitScriptsOutputReference
```
@@ -9909,7 +9952,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsS3OutputReference
```
@@ -10354,7 +10397,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsVolumesOutputReference
```
@@ -10632,7 +10675,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoInitScriptsWorkspaceOutputReference
```
@@ -10910,7 +10953,7 @@ func InternalValue() DataDatabricksClusterClusterInfoInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoOutputReference
```
@@ -12528,7 +12571,7 @@ func InternalValue() DataDatabricksClusterClusterInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscluster"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscluster"
datadatabrickscluster.NewDataDatabricksClusterClusterInfoTerminationReasonOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksClusterClusterInfoTerminationReasonOutputReference
```
diff --git a/docs/dataDatabricksCluster.java.md b/docs/dataDatabricksCluster.java.md
index ec7d16218..a0ee07c5a 100644
--- a/docs/dataDatabricksCluster.java.md
+++ b/docs/dataDatabricksCluster.java.md
@@ -153,6 +153,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -223,6 +224,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -6926,6 +6935,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6933,6 +6943,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -9358,6 +9384,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -9365,6 +9392,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksCluster.python.md b/docs/dataDatabricksCluster.python.md
index 54f415e10..cce47f0ef 100644
--- a/docs/dataDatabricksCluster.python.md
+++ b/docs/dataDatabricksCluster.python.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -226,6 +227,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -7709,6 +7718,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -7716,6 +7726,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -10330,6 +10358,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -10337,6 +10366,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksCluster.typescript.md b/docs/dataDatabricksCluster.typescript.md
index e101712aa..9405d5803 100644
--- a/docs/dataDatabricksCluster.typescript.md
+++ b/docs/dataDatabricksCluster.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -124,6 +125,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -6661,6 +6670,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6668,6 +6678,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -9093,6 +9119,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -9100,6 +9127,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksClusterPolicy.csharp.md b/docs/dataDatabricksClusterPolicy.csharp.md
index fbfd34a13..9733315b7 100644
--- a/docs/dataDatabricksClusterPolicy.csharp.md
+++ b/docs/dataDatabricksClusterPolicy.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -127,6 +128,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksClusterPolicy.go.md b/docs/dataDatabricksClusterPolicy.go.md
index 6d52100e5..e74bb928e 100644
--- a/docs/dataDatabricksClusterPolicy.go.md
+++ b/docs/dataDatabricksClusterPolicy.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
datadatabricksclusterpolicy.NewDataDatabricksClusterPolicy(scope Construct, id *string, config DataDatabricksClusterPolicyConfig) DataDatabricksClusterPolicy
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -127,6 +128,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -323,7 +332,7 @@ func ResetPolicyFamilyId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
datadatabricksclusterpolicy.DataDatabricksClusterPolicy_IsConstruct(x interface{}) *bool
```
@@ -355,7 +364,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
datadatabricksclusterpolicy.DataDatabricksClusterPolicy_IsTerraformElement(x interface{}) *bool
```
@@ -369,7 +378,7 @@ datadatabricksclusterpolicy.DataDatabricksClusterPolicy_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
datadatabricksclusterpolicy.DataDatabricksClusterPolicy_IsTerraformDataSource(x interface{}) *bool
```
@@ -383,7 +392,7 @@ datadatabricksclusterpolicy.DataDatabricksClusterPolicy_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
datadatabricksclusterpolicy.DataDatabricksClusterPolicy_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -766,7 +775,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusterpolicy"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusterpolicy"
&datadatabricksclusterpolicy.DataDatabricksClusterPolicyConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksClusterPolicy.java.md b/docs/dataDatabricksClusterPolicy.java.md
index 35da9fbdb..134c97663 100644
--- a/docs/dataDatabricksClusterPolicy.java.md
+++ b/docs/dataDatabricksClusterPolicy.java.md
@@ -192,6 +192,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -265,6 +266,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksClusterPolicy.python.md b/docs/dataDatabricksClusterPolicy.python.md
index fc32ee9c1..02b4c15c9 100644
--- a/docs/dataDatabricksClusterPolicy.python.md
+++ b/docs/dataDatabricksClusterPolicy.python.md
@@ -189,6 +189,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -267,6 +268,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksClusterPolicy.typescript.md b/docs/dataDatabricksClusterPolicy.typescript.md
index 0992f25fd..6edf1a8cf 100644
--- a/docs/dataDatabricksClusterPolicy.typescript.md
+++ b/docs/dataDatabricksClusterPolicy.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -127,6 +128,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksClusters.csharp.md b/docs/dataDatabricksClusters.csharp.md
index f548619ed..42a0ee447 100644
--- a/docs/dataDatabricksClusters.csharp.md
+++ b/docs/dataDatabricksClusters.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksClusters.go.md b/docs/dataDatabricksClusters.go.md
index 2ae9edb02..0cd2538a5 100644
--- a/docs/dataDatabricksClusters.go.md
+++ b/docs/dataDatabricksClusters.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
datadatabricksclusters.NewDataDatabricksClusters(scope Construct, id *string, config DataDatabricksClustersConfig) DataDatabricksClusters
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
datadatabricksclusters.DataDatabricksClusters_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
datadatabricksclusters.DataDatabricksClusters_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksclusters.DataDatabricksClusters_IsTerraformElement(x interface{})
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
datadatabricksclusters.DataDatabricksClusters_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksclusters.DataDatabricksClusters_IsTerraformDataSource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
datadatabricksclusters.DataDatabricksClusters_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -603,7 +612,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksclusters"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksclusters"
&datadatabricksclusters.DataDatabricksClustersConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksClusters.java.md b/docs/dataDatabricksClusters.java.md
index ec0a16157..8244be9e2 100644
--- a/docs/dataDatabricksClusters.java.md
+++ b/docs/dataDatabricksClusters.java.md
@@ -131,6 +131,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksClusters.python.md b/docs/dataDatabricksClusters.python.md
index 2e7731708..1c50fb056 100644
--- a/docs/dataDatabricksClusters.python.md
+++ b/docs/dataDatabricksClusters.python.md
@@ -129,6 +129,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksClusters.typescript.md b/docs/dataDatabricksClusters.typescript.md
index bfc1e7794..c2ee281f8 100644
--- a/docs/dataDatabricksClusters.typescript.md
+++ b/docs/dataDatabricksClusters.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksCurrentConfig.csharp.md b/docs/dataDatabricksCurrentConfig.csharp.md
index 3565fc464..981737e15 100644
--- a/docs/dataDatabricksCurrentConfig.csharp.md
+++ b/docs/dataDatabricksCurrentConfig.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksCurrentConfig.go.md b/docs/dataDatabricksCurrentConfig.go.md
index 899d3ab64..8dfb05221 100644
--- a/docs/dataDatabricksCurrentConfig.go.md
+++ b/docs/dataDatabricksCurrentConfig.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
datadatabrickscurrentconfig.NewDataDatabricksCurrentConfig(scope Construct, id *string, config DataDatabricksCurrentConfigConfig) DataDatabricksCurrentConfig
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -309,7 +318,7 @@ func ResetIsAccount()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
datadatabrickscurrentconfig.DataDatabricksCurrentConfig_IsConstruct(x interface{}) *bool
```
@@ -341,7 +350,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
datadatabrickscurrentconfig.DataDatabricksCurrentConfig_IsTerraformElement(x interface{}) *bool
```
@@ -355,7 +364,7 @@ datadatabrickscurrentconfig.DataDatabricksCurrentConfig_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
datadatabrickscurrentconfig.DataDatabricksCurrentConfig_IsTerraformDataSource(x interface{}) *bool
```
@@ -369,7 +378,7 @@ datadatabrickscurrentconfig.DataDatabricksCurrentConfig_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
datadatabrickscurrentconfig.DataDatabricksCurrentConfig_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -708,7 +717,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentconfig"
&datadatabrickscurrentconfig.DataDatabricksCurrentConfigConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksCurrentConfig.java.md b/docs/dataDatabricksCurrentConfig.java.md
index 57239f882..cc789d7fa 100644
--- a/docs/dataDatabricksCurrentConfig.java.md
+++ b/docs/dataDatabricksCurrentConfig.java.md
@@ -172,6 +172,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -243,6 +244,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksCurrentConfig.python.md b/docs/dataDatabricksCurrentConfig.python.md
index c93a42a5c..04abdec17 100644
--- a/docs/dataDatabricksCurrentConfig.python.md
+++ b/docs/dataDatabricksCurrentConfig.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -245,6 +246,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksCurrentConfig.typescript.md b/docs/dataDatabricksCurrentConfig.typescript.md
index 0ed1e209d..45f43c62a 100644
--- a/docs/dataDatabricksCurrentConfig.typescript.md
+++ b/docs/dataDatabricksCurrentConfig.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksCurrentUser.csharp.md b/docs/dataDatabricksCurrentUser.csharp.md
index 4a9bc015b..c05ac64c7 100644
--- a/docs/dataDatabricksCurrentUser.csharp.md
+++ b/docs/dataDatabricksCurrentUser.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksCurrentUser.go.md b/docs/dataDatabricksCurrentUser.go.md
index cb0037fc1..379fe724c 100644
--- a/docs/dataDatabricksCurrentUser.go.md
+++ b/docs/dataDatabricksCurrentUser.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
datadatabrickscurrentuser.NewDataDatabricksCurrentUser(scope Construct, id *string, config DataDatabricksCurrentUserConfig) DataDatabricksCurrentUser
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -274,7 +283,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
datadatabrickscurrentuser.DataDatabricksCurrentUser_IsConstruct(x interface{}) *bool
```
@@ -306,7 +315,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
datadatabrickscurrentuser.DataDatabricksCurrentUser_IsTerraformElement(x interface{}) *bool
```
@@ -320,7 +329,7 @@ datadatabrickscurrentuser.DataDatabricksCurrentUser_IsTerraformElement(x interfa
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
datadatabrickscurrentuser.DataDatabricksCurrentUser_IsTerraformDataSource(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabrickscurrentuser.DataDatabricksCurrentUser_IsTerraformDataSource(x inte
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
datadatabrickscurrentuser.DataDatabricksCurrentUser_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -640,7 +649,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickscurrentuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickscurrentuser"
&datadatabrickscurrentuser.DataDatabricksCurrentUserConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksCurrentUser.java.md b/docs/dataDatabricksCurrentUser.java.md
index 93f20d7d1..56ea41379 100644
--- a/docs/dataDatabricksCurrentUser.java.md
+++ b/docs/dataDatabricksCurrentUser.java.md
@@ -121,6 +121,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -187,6 +188,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksCurrentUser.python.md b/docs/dataDatabricksCurrentUser.python.md
index 0fb2ad6f5..d21380eb7 100644
--- a/docs/dataDatabricksCurrentUser.python.md
+++ b/docs/dataDatabricksCurrentUser.python.md
@@ -119,6 +119,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -190,6 +191,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksCurrentUser.typescript.md b/docs/dataDatabricksCurrentUser.typescript.md
index 4e34bfa78..3462388b5 100644
--- a/docs/dataDatabricksCurrentUser.typescript.md
+++ b/docs/dataDatabricksCurrentUser.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksDbfsFile.csharp.md b/docs/dataDatabricksDbfsFile.csharp.md
index dfe5ddf87..9ec743f16 100644
--- a/docs/dataDatabricksDbfsFile.csharp.md
+++ b/docs/dataDatabricksDbfsFile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksDbfsFile.go.md b/docs/dataDatabricksDbfsFile.go.md
index 0f634e81d..372fc2095 100644
--- a/docs/dataDatabricksDbfsFile.go.md
+++ b/docs/dataDatabricksDbfsFile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
datadatabricksdbfsfile.NewDataDatabricksDbfsFile(scope Construct, id *string, config DataDatabricksDbfsFileConfig) DataDatabricksDbfsFile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -274,7 +283,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
datadatabricksdbfsfile.DataDatabricksDbfsFile_IsConstruct(x interface{}) *bool
```
@@ -306,7 +315,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
datadatabricksdbfsfile.DataDatabricksDbfsFile_IsTerraformElement(x interface{}) *bool
```
@@ -320,7 +329,7 @@ datadatabricksdbfsfile.DataDatabricksDbfsFile_IsTerraformElement(x interface{})
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
datadatabricksdbfsfile.DataDatabricksDbfsFile_IsTerraformDataSource(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksdbfsfile.DataDatabricksDbfsFile_IsTerraformDataSource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
datadatabricksdbfsfile.DataDatabricksDbfsFile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -629,7 +638,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfile"
&datadatabricksdbfsfile.DataDatabricksDbfsFileConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksDbfsFile.java.md b/docs/dataDatabricksDbfsFile.java.md
index 7821e7745..c015a7125 100644
--- a/docs/dataDatabricksDbfsFile.java.md
+++ b/docs/dataDatabricksDbfsFile.java.md
@@ -142,6 +142,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -208,6 +209,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksDbfsFile.python.md b/docs/dataDatabricksDbfsFile.python.md
index cf7a682c4..0bb72c903 100644
--- a/docs/dataDatabricksDbfsFile.python.md
+++ b/docs/dataDatabricksDbfsFile.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -210,6 +211,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksDbfsFile.typescript.md b/docs/dataDatabricksDbfsFile.typescript.md
index b602060e8..6d977d42c 100644
--- a/docs/dataDatabricksDbfsFile.typescript.md
+++ b/docs/dataDatabricksDbfsFile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksDbfsFilePaths.csharp.md b/docs/dataDatabricksDbfsFilePaths.csharp.md
index 12be910f3..a609f17d2 100644
--- a/docs/dataDatabricksDbfsFilePaths.csharp.md
+++ b/docs/dataDatabricksDbfsFilePaths.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -821,6 +830,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -828,6 +838,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksDbfsFilePaths.go.md b/docs/dataDatabricksDbfsFilePaths.go.md
index f914d4f8b..1c0d63e2b 100644
--- a/docs/dataDatabricksDbfsFilePaths.go.md
+++ b/docs/dataDatabricksDbfsFilePaths.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.NewDataDatabricksDbfsFilePaths(scope Construct, id *string, config DataDatabricksDbfsFilePathsConfig) DataDatabricksDbfsFilePaths
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -274,7 +283,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_IsConstruct(x interface{}) *bool
```
@@ -306,7 +315,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_IsTerraformElement(x interface{}) *bool
```
@@ -320,7 +329,7 @@ datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_IsTerraformDataSource(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePaths_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -618,7 +627,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
&datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePathsConfig {
Connection: interface{},
@@ -765,7 +774,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
&datadatabricksdbfsfilepaths.DataDatabricksDbfsFilePathsPathListStruct {
@@ -780,7 +789,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.NewDataDatabricksDbfsFilePathsPathListStructList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksDbfsFilePathsPathListStructList
```
@@ -821,6 +830,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -828,6 +838,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -912,7 +938,7 @@ func Fqn() *string
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdbfsfilepaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdbfsfilepaths"
datadatabricksdbfsfilepaths.NewDataDatabricksDbfsFilePathsPathListStructOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksDbfsFilePathsPathListStructOutputReference
```
diff --git a/docs/dataDatabricksDbfsFilePaths.java.md b/docs/dataDatabricksDbfsFilePaths.java.md
index 73b692f4a..ebdb43215 100644
--- a/docs/dataDatabricksDbfsFilePaths.java.md
+++ b/docs/dataDatabricksDbfsFilePaths.java.md
@@ -142,6 +142,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -208,6 +209,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -913,6 +922,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -920,6 +930,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksDbfsFilePaths.python.md b/docs/dataDatabricksDbfsFilePaths.python.md
index ff2d142b3..dfa55ecd2 100644
--- a/docs/dataDatabricksDbfsFilePaths.python.md
+++ b/docs/dataDatabricksDbfsFilePaths.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -210,6 +211,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -944,6 +953,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -951,6 +961,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksDbfsFilePaths.typescript.md b/docs/dataDatabricksDbfsFilePaths.typescript.md
index 79ec74cc7..2ac1319ce 100644
--- a/docs/dataDatabricksDbfsFilePaths.typescript.md
+++ b/docs/dataDatabricksDbfsFilePaths.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -808,6 +817,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -815,6 +825,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksDirectory.csharp.md b/docs/dataDatabricksDirectory.csharp.md
index e29032f54..5c4f918aa 100644
--- a/docs/dataDatabricksDirectory.csharp.md
+++ b/docs/dataDatabricksDirectory.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksDirectory.go.md b/docs/dataDatabricksDirectory.go.md
index f311e7f99..e78a7df04 100644
--- a/docs/dataDatabricksDirectory.go.md
+++ b/docs/dataDatabricksDirectory.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
datadatabricksdirectory.NewDataDatabricksDirectory(scope Construct, id *string, config DataDatabricksDirectoryConfig) DataDatabricksDirectory
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetObjectId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
datadatabricksdirectory.DataDatabricksDirectory_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
datadatabricksdirectory.DataDatabricksDirectory_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksdirectory.DataDatabricksDirectory_IsTerraformElement(x interface{}
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
datadatabricksdirectory.DataDatabricksDirectory_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksdirectory.DataDatabricksDirectory_IsTerraformDataSource(x interfac
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
datadatabricksdirectory.DataDatabricksDirectory_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -614,7 +623,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksdirectory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksdirectory"
&datadatabricksdirectory.DataDatabricksDirectoryConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksDirectory.java.md b/docs/dataDatabricksDirectory.java.md
index bf4c0f79f..48f5bbf95 100644
--- a/docs/dataDatabricksDirectory.java.md
+++ b/docs/dataDatabricksDirectory.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -208,6 +209,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksDirectory.python.md b/docs/dataDatabricksDirectory.python.md
index dbb4d805d..1662d811f 100644
--- a/docs/dataDatabricksDirectory.python.md
+++ b/docs/dataDatabricksDirectory.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -211,6 +212,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksDirectory.typescript.md b/docs/dataDatabricksDirectory.typescript.md
index 9ff7eb535..9d7b39892 100644
--- a/docs/dataDatabricksDirectory.typescript.md
+++ b/docs/dataDatabricksDirectory.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksGroup.csharp.md b/docs/dataDatabricksGroup.csharp.md
index f03af34a3..8550ca28d 100644
--- a/docs/dataDatabricksGroup.csharp.md
+++ b/docs/dataDatabricksGroup.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksGroup.go.md b/docs/dataDatabricksGroup.go.md
index 00153ff22..f2c1f71d5 100644
--- a/docs/dataDatabricksGroup.go.md
+++ b/docs/dataDatabricksGroup.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
datadatabricksgroup.NewDataDatabricksGroup(scope Construct, id *string, config DataDatabricksGroupConfig) DataDatabricksGroup
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -365,7 +374,7 @@ func ResetWorkspaceAccess()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
datadatabricksgroup.DataDatabricksGroup_IsConstruct(x interface{}) *bool
```
@@ -397,7 +406,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
datadatabricksgroup.DataDatabricksGroup_IsTerraformElement(x interface{}) *bool
```
@@ -411,7 +420,7 @@ datadatabricksgroup.DataDatabricksGroup_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
datadatabricksgroup.DataDatabricksGroup_IsTerraformDataSource(x interface{}) *bool
```
@@ -425,7 +434,7 @@ datadatabricksgroup.DataDatabricksGroup_IsTerraformDataSource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
datadatabricksgroup.DataDatabricksGroup_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -962,7 +971,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksgroup"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksgroup"
&datadatabricksgroup.DataDatabricksGroupConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksGroup.java.md b/docs/dataDatabricksGroup.java.md
index 36de98b39..70026ce39 100644
--- a/docs/dataDatabricksGroup.java.md
+++ b/docs/dataDatabricksGroup.java.md
@@ -266,6 +266,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -345,6 +346,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksGroup.python.md b/docs/dataDatabricksGroup.python.md
index 674f8b144..7b4dab5bb 100644
--- a/docs/dataDatabricksGroup.python.md
+++ b/docs/dataDatabricksGroup.python.md
@@ -259,6 +259,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -343,6 +344,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksGroup.typescript.md b/docs/dataDatabricksGroup.typescript.md
index 8fe49154f..9bbaef3fc 100644
--- a/docs/dataDatabricksGroup.typescript.md
+++ b/docs/dataDatabricksGroup.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksInstancePool.csharp.md b/docs/dataDatabricksInstancePool.csharp.md
index 247dcb67d..c0d7b7c0d 100644
--- a/docs/dataDatabricksInstancePool.csharp.md
+++ b/docs/dataDatabricksInstancePool.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -3941,6 +3950,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3948,6 +3958,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -4395,6 +4421,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4402,6 +4429,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -6065,6 +6108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6072,6 +6116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksInstancePool.go.md b/docs/dataDatabricksInstancePool.go.md
index 79ffc5211..88063db6f 100644
--- a/docs/dataDatabricksInstancePool.go.md
+++ b/docs/dataDatabricksInstancePool.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePool(scope Construct, id *string, config DataDatabricksInstancePoolConfig) DataDatabricksInstancePool
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -294,7 +303,7 @@ func ResetPoolInfo()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.DataDatabricksInstancePool_IsConstruct(x interface{}) *bool
```
@@ -326,7 +335,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.DataDatabricksInstancePool_IsTerraformElement(x interface{}) *bool
```
@@ -340,7 +349,7 @@ datadatabricksinstancepool.DataDatabricksInstancePool_IsTerraformElement(x inter
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.DataDatabricksInstancePool_IsTerraformDataSource(x interface{}) *bool
```
@@ -354,7 +363,7 @@ datadatabricksinstancepool.DataDatabricksInstancePool_IsTerraformDataSource(x in
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.DataDatabricksInstancePool_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -627,7 +636,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolConfig {
Connection: interface{},
@@ -639,7 +648,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
Provisioners: *[]interface{},
Name: *string,
Id: *string,
- PoolInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfo,
+ PoolInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfo,
}
```
@@ -776,18 +785,18 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfo {
IdleInstanceAutoterminationMinutes: *f64,
InstancePoolName: *string,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoAzureAttributes,
CustomTags: *map[string]*string,
DefaultTags: *map[string]*string,
- DiskSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoDiskSpec,
+ DiskSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoDiskSpec,
EnableElasticDisk: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoGcpAttributes,
InstancePoolFleetAttributes: interface{},
InstancePoolId: *string,
MaxCapacity: *f64,
@@ -796,7 +805,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
PreloadedDockerImage: interface{},
PreloadedSparkVersions: *[]*string,
State: *string,
- Stats: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoStats,
+ Stats: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoStats,
}
```
@@ -1060,7 +1069,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoAwsAttributes {
Availability: *string,
@@ -1120,7 +1129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoAzureAttributes {
Availability: *string,
@@ -1166,12 +1175,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoDiskSpec {
DiskCount: *f64,
DiskSize: *f64,
- DiskType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoDiskSpecDiskType,
+ DiskType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoDiskSpecDiskType,
}
```
@@ -1228,7 +1237,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoDiskSpecDiskType {
AzureDiskVolumeType: *string,
@@ -1274,7 +1283,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoGcpAttributes {
GcpAvailability: *string,
@@ -1320,12 +1329,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributes {
LaunchTemplateOverride: interface{},
- FleetOnDemandOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption,
- FleetSpotOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOption,
+ FleetOnDemandOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption,
+ FleetSpotOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOption,
}
```
@@ -1386,7 +1395,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption {
AllocationStrategy: *string,
@@ -1432,7 +1441,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOption {
AllocationStrategy: *string,
@@ -1478,7 +1487,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverride {
AvailabilityZone: *string,
@@ -1524,11 +1533,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoPreloadedDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksInstancePool.DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuth,
}
```
@@ -1572,7 +1581,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuth {
Password: *string,
@@ -1618,7 +1627,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
&datadatabricksinstancepool.DataDatabricksInstancePoolPoolInfoStats {
IdleCount: *f64,
@@ -1694,7 +1703,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoAwsAttributesOutputReference
```
@@ -2030,7 +2039,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoAzureAttributesOutputReference
```
@@ -2337,7 +2346,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeOutputReference
```
@@ -2644,7 +2653,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoDiskSpecDiskType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoDiskSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoDiskSpecOutputReference
```
@@ -2993,7 +3002,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoDiskSpec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoGcpAttributesOutputReference
```
@@ -3300,7 +3309,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionOutputReference
```
@@ -3600,7 +3609,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttribut
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionOutputReference
```
@@ -3900,7 +3909,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttribut
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideList
```
@@ -3941,6 +3950,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3948,6 +3958,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -4043,7 +4069,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference
```
@@ -4354,7 +4380,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesList
```
@@ -4395,6 +4421,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4402,6 +4429,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -4497,7 +4540,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesOutputReference
```
@@ -4883,7 +4926,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoOutputReference
```
@@ -5731,7 +5774,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthOutputReference
```
@@ -6024,7 +6067,7 @@ func InternalValue() DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasic
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoPreloadedDockerImageList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksInstancePoolPoolInfoPreloadedDockerImageList
```
@@ -6065,6 +6108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6072,6 +6116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -6167,7 +6227,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoPreloadedDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksInstancePoolPoolInfoPreloadedDockerImageOutputReference
```
@@ -6498,7 +6558,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstancepool"
datadatabricksinstancepool.NewDataDatabricksInstancePoolPoolInfoStatsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksInstancePoolPoolInfoStatsOutputReference
```
diff --git a/docs/dataDatabricksInstancePool.java.md b/docs/dataDatabricksInstancePool.java.md
index 665ab3fa7..f719712d2 100644
--- a/docs/dataDatabricksInstancePool.java.md
+++ b/docs/dataDatabricksInstancePool.java.md
@@ -143,6 +143,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -211,6 +212,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -4038,6 +4047,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4045,6 +4055,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -4492,6 +4518,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4499,6 +4526,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -6162,6 +6205,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6169,6 +6213,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksInstancePool.python.md b/docs/dataDatabricksInstancePool.python.md
index dd34c22b3..a94ae6e63 100644
--- a/docs/dataDatabricksInstancePool.python.md
+++ b/docs/dataDatabricksInstancePool.python.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -214,6 +215,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -4427,6 +4436,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -4434,6 +4444,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -4916,6 +4944,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -4923,6 +4952,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -6806,6 +6853,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -6813,6 +6861,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksInstancePool.typescript.md b/docs/dataDatabricksInstancePool.typescript.md
index 3969257b4..bcc2ccca7 100644
--- a/docs/dataDatabricksInstancePool.typescript.md
+++ b/docs/dataDatabricksInstancePool.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -3870,6 +3879,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -3877,6 +3887,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -4324,6 +4350,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4331,6 +4358,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -5994,6 +6037,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6001,6 +6045,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksInstanceProfiles.csharp.md b/docs/dataDatabricksInstanceProfiles.csharp.md
index fe8f085d5..189d14ee7 100644
--- a/docs/dataDatabricksInstanceProfiles.csharp.md
+++ b/docs/dataDatabricksInstanceProfiles.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -857,6 +866,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -864,6 +874,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksInstanceProfiles.go.md b/docs/dataDatabricksInstanceProfiles.go.md
index e68708746..e5da921c8 100644
--- a/docs/dataDatabricksInstanceProfiles.go.md
+++ b/docs/dataDatabricksInstanceProfiles.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.NewDataDatabricksInstanceProfiles(scope Construct, id *string, config DataDatabricksInstanceProfilesConfig) DataDatabricksInstanceProfiles
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -294,7 +303,7 @@ func ResetInstanceProfiles()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_IsConstruct(x interface{}) *bool
```
@@ -326,7 +335,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_IsTerraformElement(x interface{}) *bool
```
@@ -340,7 +349,7 @@ datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_IsTerraformElement
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_IsTerraformDataSource(x interface{}) *bool
```
@@ -354,7 +363,7 @@ datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_IsTerraformDataSou
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.DataDatabricksInstanceProfiles_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -605,7 +614,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
&datadatabricksinstanceprofiles.DataDatabricksInstanceProfilesConfig {
Connection: interface{},
@@ -740,7 +749,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
&datadatabricksinstanceprofiles.DataDatabricksInstanceProfilesInstanceProfiles {
Arn: *string,
@@ -816,7 +825,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.NewDataDatabricksInstanceProfilesInstanceProfilesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksInstanceProfilesInstanceProfilesList
```
@@ -857,6 +866,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -864,6 +874,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -959,7 +985,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksinstanceprofiles"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksinstanceprofiles"
datadatabricksinstanceprofiles.NewDataDatabricksInstanceProfilesInstanceProfilesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksInstanceProfilesInstanceProfilesOutputReference
```
diff --git a/docs/dataDatabricksInstanceProfiles.java.md b/docs/dataDatabricksInstanceProfiles.java.md
index 43be52c3e..3cb7bd093 100644
--- a/docs/dataDatabricksInstanceProfiles.java.md
+++ b/docs/dataDatabricksInstanceProfiles.java.md
@@ -134,6 +134,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -202,6 +203,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -943,6 +952,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -950,6 +960,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksInstanceProfiles.python.md b/docs/dataDatabricksInstanceProfiles.python.md
index 7b79f9489..452249abe 100644
--- a/docs/dataDatabricksInstanceProfiles.python.md
+++ b/docs/dataDatabricksInstanceProfiles.python.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -204,6 +205,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -976,6 +985,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -983,6 +993,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksInstanceProfiles.typescript.md b/docs/dataDatabricksInstanceProfiles.typescript.md
index 15daa97ad..3b234e65e 100644
--- a/docs/dataDatabricksInstanceProfiles.typescript.md
+++ b/docs/dataDatabricksInstanceProfiles.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -842,6 +851,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -849,6 +859,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksJob.csharp.md b/docs/dataDatabricksJob.csharp.md
index b6d563258..aaa10571e 100644
--- a/docs/dataDatabricksJob.csharp.md
+++ b/docs/dataDatabricksJob.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -11099,6 +11108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11106,6 +11116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -14281,6 +14307,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -14288,6 +14315,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -14778,6 +14821,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -14785,6 +14829,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -17094,6 +17154,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -17101,6 +17162,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -20024,6 +20101,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -20031,6 +20109,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -24180,6 +24274,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -24187,6 +24282,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -27605,6 +27716,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -27612,6 +27724,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -30535,6 +30663,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -30542,6 +30671,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -36307,6 +36452,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -36314,6 +36460,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -40335,6 +40497,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -40342,6 +40505,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -41445,6 +41624,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -41452,6 +41632,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -42242,6 +42438,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -42249,6 +42446,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -43494,6 +43707,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -43501,6 +43715,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -45810,6 +46040,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -45817,6 +46048,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -48740,6 +48987,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -48747,6 +48995,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -56437,6 +56701,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -56444,6 +56709,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -57276,6 +57557,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -57283,6 +57565,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -58761,6 +59059,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58768,6 +59067,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -59200,6 +59515,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59207,6 +59523,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -59639,6 +59971,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59646,6 +59979,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -60078,6 +60427,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -60085,6 +60435,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -61576,6 +61942,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61583,6 +61950,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -62015,6 +62398,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62022,6 +62406,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -62454,6 +62854,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62461,6 +62862,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -62893,6 +63310,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62900,6 +63318,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksJob.go.md b/docs/dataDatabricksJob.go.md
index be4bf94dc..63b60eb2f 100644
--- a/docs/dataDatabricksJob.go.md
+++ b/docs/dataDatabricksJob.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJob(scope Construct, id *string, config DataDatabricksJobConfig) DataDatabricksJob
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -315,7 +324,7 @@ func ResetName()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.DataDatabricksJob_IsConstruct(x interface{}) *bool
```
@@ -347,7 +356,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.DataDatabricksJob_IsTerraformElement(x interface{}) *bool
```
@@ -361,7 +370,7 @@ datadatabricksjob.DataDatabricksJob_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.DataDatabricksJob_IsTerraformDataSource(x interface{}) *bool
```
@@ -375,7 +384,7 @@ datadatabricksjob.DataDatabricksJob_IsTerraformDataSource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.DataDatabricksJob_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -692,7 +701,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobConfig {
Connection: interface{},
@@ -705,7 +714,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
Id: *string,
JobId: *string,
JobName: *string,
- JobSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettings,
+ JobSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettings,
Name: *string,
}
```
@@ -869,14 +878,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettings {
CreatedTime: *f64,
CreatorUserName: *string,
JobId: *f64,
RunAsUserName: *string,
- Settings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettings,
+ Settings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettings,
}
```
@@ -959,45 +968,45 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettings {
Compute: interface{},
- Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsContinuous,
- DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsDbtTask,
- Deployment: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsDeployment,
+ Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsContinuous,
+ DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsDbtTask,
+ Deployment: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsDeployment,
Description: *string,
EditMode: *string,
- EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsEmailNotifications,
+ EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsEmailNotifications,
ExistingClusterId: *string,
Format: *string,
- GitSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsGitSource,
- Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsHealth,
+ GitSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsGitSource,
+ Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsHealth,
JobCluster: interface{},
Library: interface{},
MaxConcurrentRuns: *f64,
MaxRetries: *f64,
MinRetryIntervalMillis: *f64,
Name: *string,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewCluster,
- NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNotebookTask,
- NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNotificationSettings,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewCluster,
+ NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNotebookTask,
+ NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNotificationSettings,
Parameter: interface{},
- PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsPipelineTask,
- PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsPythonWheelTask,
- Queue: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsQueue,
+ PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsPipelineTask,
+ PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsPythonWheelTask,
+ Queue: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsQueue,
RetryOnTimeout: interface{},
- RunAs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsRunAs,
- RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsRunJobTask,
- Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSchedule,
- SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkJarTask,
- SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkPythonTask,
- SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkSubmitTask,
+ RunAs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsRunAs,
+ RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsRunJobTask,
+ Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSchedule,
+ SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkJarTask,
+ SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkPythonTask,
+ SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsSparkSubmitTask,
Tags: *map[string]*string,
Task: interface{},
TimeoutSeconds: *f64,
- Trigger: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTrigger,
- WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsWebhookNotifications,
+ Trigger: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTrigger,
+ WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsWebhookNotifications,
}
```
@@ -1531,11 +1540,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsCompute {
ComputeKey: *string,
- Spec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsComputeSpec,
+ Spec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsComputeSpec,
}
```
@@ -1579,7 +1588,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsComputeSpec {
Kind: *string,
@@ -1611,7 +1620,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsContinuous {
PauseStatus: *string,
@@ -1643,7 +1652,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsDbtTask {
Commands: *[]*string,
@@ -1745,7 +1754,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsDeployment {
Kind: *string,
@@ -1791,7 +1800,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsEmailNotifications {
NoAlertForSkippedRuns: interface{},
@@ -1879,13 +1888,13 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsGitSource {
Url: *string,
Branch: *string,
Commit: *string,
- JobSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsGitSourceJobSource,
+ JobSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsGitSourceJobSource,
Provider: *string,
Tag: *string,
}
@@ -1983,7 +1992,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsGitSourceJobSource {
ImportFromGitBranch: *string,
@@ -2043,7 +2052,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsHealth {
Rules: interface{},
@@ -2077,7 +2086,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsHealthRules {
Metric: *string,
@@ -2137,11 +2146,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobCluster {
JobClusterKey: *string,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewCluster,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewCluster,
}
```
@@ -2185,28 +2194,28 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewCluster {
NumWorkers: *f64,
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -2217,7 +2226,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadType,
}
```
@@ -2641,7 +2650,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscale {
MaxWorkers: *f64,
@@ -2687,7 +2696,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributes {
Availability: *string,
@@ -2817,7 +2826,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributes {
Availability: *string,
@@ -2877,11 +2886,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3,
}
```
@@ -2927,7 +2936,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -2959,7 +2968,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3 {
Destination: *string,
@@ -3075,11 +3084,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -3137,7 +3146,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -3183,11 +3192,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth,
}
```
@@ -3231,7 +3240,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth {
Password: *string,
@@ -3277,7 +3286,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributes {
Availability: *string,
@@ -3379,16 +3388,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace,
}
```
@@ -3509,7 +3518,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss {
Destination: *string,
@@ -3541,7 +3550,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs {
Destination: *string,
@@ -3573,7 +3582,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile {
Destination: *string,
@@ -3605,7 +3614,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs {
Destination: *string,
@@ -3637,7 +3646,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 {
Destination: *string,
@@ -3753,7 +3762,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumes {
Destination: *string,
@@ -3785,7 +3794,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -3817,10 +3826,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients,
}
```
@@ -3851,7 +3860,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -3897,14 +3906,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsLibrary {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsLibraryPypi,
Whl: *string,
}
```
@@ -4005,7 +4014,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsLibraryCran {
Package: *string,
@@ -4051,7 +4060,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsLibraryMaven {
Coordinates: *string,
@@ -4111,7 +4120,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsLibraryPypi {
Package: *string,
@@ -4157,28 +4166,28 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewCluster {
NumWorkers: *f64,
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -4189,7 +4198,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadType,
}
```
@@ -4613,7 +4622,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterAutoscale {
MaxWorkers: *f64,
@@ -4659,7 +4668,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributes {
Availability: *string,
@@ -4789,7 +4798,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributes {
Availability: *string,
@@ -4849,11 +4858,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3,
}
```
@@ -4899,7 +4908,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -4931,7 +4940,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3 {
Destination: *string,
@@ -5047,11 +5056,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -5109,7 +5118,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -5155,11 +5164,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuth,
}
```
@@ -5203,7 +5212,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuth {
Password: *string,
@@ -5249,7 +5258,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributes {
Availability: *string,
@@ -5351,16 +5360,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspace,
}
```
@@ -5481,7 +5490,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfss {
Destination: *string,
@@ -5513,7 +5522,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfs {
Destination: *string,
@@ -5545,7 +5554,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFile {
Destination: *string,
@@ -5577,7 +5586,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcs {
Destination: *string,
@@ -5609,7 +5618,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3 {
Destination: *string,
@@ -5725,7 +5734,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumes {
Destination: *string,
@@ -5757,7 +5766,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -5789,10 +5798,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClients,
}
```
@@ -5823,7 +5832,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -5869,7 +5878,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNotebookTask {
NotebookPath: *string,
@@ -5929,7 +5938,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsNotificationSettings {
NoAlertForCanceledRuns: interface{},
@@ -5975,7 +5984,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsParameter {
Default: *string,
@@ -6021,7 +6030,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsPipelineTask {
PipelineId: *string,
@@ -6067,7 +6076,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsPythonWheelTask {
EntryPoint: *string,
@@ -6141,7 +6150,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsQueue {
Enabled: interface{},
@@ -6173,7 +6182,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsRunAs {
ServicePrincipalName: *string,
@@ -6219,7 +6228,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsRunJobTask {
JobId: *f64,
@@ -6265,7 +6274,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsSchedule {
QuartzCronExpression: *string,
@@ -6325,7 +6334,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsSparkJarTask {
JarUri: *string,
@@ -6385,7 +6394,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsSparkPythonTask {
PythonFile: *string,
@@ -6445,7 +6454,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsSparkSubmitTask {
Parameters: *[]*string,
@@ -6477,36 +6486,36 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTask {
ComputeKey: *string,
- ConditionTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskConditionTask,
- DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskDbtTask,
+ ConditionTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskConditionTask,
+ DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskDbtTask,
DependsOn: interface{},
Description: *string,
- EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskEmailNotifications,
+ EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskEmailNotifications,
ExistingClusterId: *string,
- Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskHealth,
+ Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskHealth,
JobClusterKey: *string,
Library: interface{},
MaxRetries: *f64,
MinRetryIntervalMillis: *f64,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewCluster,
- NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNotebookTask,
- NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNotificationSettings,
- PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskPipelineTask,
- PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskPythonWheelTask,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewCluster,
+ NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNotebookTask,
+ NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNotificationSettings,
+ PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskPipelineTask,
+ PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskPythonWheelTask,
RetryOnTimeout: interface{},
RunIf: *string,
- RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskRunJobTask,
- SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkJarTask,
- SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkPythonTask,
- SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTask,
- SqlTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTask,
+ RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskRunJobTask,
+ SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkJarTask,
+ SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkPythonTask,
+ SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTask,
+ SqlTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTask,
TaskKey: *string,
TimeoutSeconds: *f64,
- WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotifications,
+ WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotifications,
}
```
@@ -6907,7 +6916,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskConditionTask {
Left: *string,
@@ -6967,7 +6976,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskDbtTask {
Commands: *[]*string,
@@ -7069,7 +7078,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskDependsOn {
TaskKey: *string,
@@ -7115,7 +7124,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskEmailNotifications {
OnDurationWarningThresholdExceeded: *[]*string,
@@ -7189,7 +7198,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskHealth {
Rules: interface{},
@@ -7223,7 +7232,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskHealthRules {
Metric: *string,
@@ -7283,14 +7292,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskLibrary {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskLibraryPypi,
Whl: *string,
}
```
@@ -7391,7 +7400,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskLibraryCran {
Package: *string,
@@ -7437,7 +7446,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskLibraryMaven {
Coordinates: *string,
@@ -7497,7 +7506,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskLibraryPypi {
Package: *string,
@@ -7543,28 +7552,28 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewCluster {
NumWorkers: *f64,
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -7575,7 +7584,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadType,
}
```
@@ -7999,7 +8008,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscale {
MaxWorkers: *f64,
@@ -8045,7 +8054,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributes {
Availability: *string,
@@ -8175,7 +8184,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributes {
Availability: *string,
@@ -8235,11 +8244,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3,
}
```
@@ -8285,7 +8294,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -8317,7 +8326,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3 {
Destination: *string,
@@ -8433,11 +8442,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -8495,7 +8504,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -8541,11 +8550,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth,
}
```
@@ -8589,7 +8598,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth {
Password: *string,
@@ -8635,7 +8644,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributes {
Availability: *string,
@@ -8737,16 +8746,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace,
}
```
@@ -8867,7 +8876,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss {
Destination: *string,
@@ -8899,7 +8908,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs {
Destination: *string,
@@ -8931,7 +8940,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFile {
Destination: *string,
@@ -8963,7 +8972,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcs {
Destination: *string,
@@ -8995,7 +9004,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3 {
Destination: *string,
@@ -9111,7 +9120,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumes {
Destination: *string,
@@ -9143,7 +9152,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -9175,10 +9184,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients,
}
```
@@ -9209,7 +9218,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -9255,7 +9264,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNotebookTask {
NotebookPath: *string,
@@ -9315,7 +9324,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskNotificationSettings {
AlertOnLastAttempt: interface{},
@@ -9375,7 +9384,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskPipelineTask {
PipelineId: *string,
@@ -9421,7 +9430,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskPythonWheelTask {
EntryPoint: *string,
@@ -9495,7 +9504,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskRunJobTask {
JobId: *f64,
@@ -9541,7 +9550,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSparkJarTask {
JarUri: *string,
@@ -9601,7 +9610,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSparkPythonTask {
PythonFile: *string,
@@ -9661,7 +9670,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTask {
Parameters: *[]*string,
@@ -9693,14 +9702,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTask {
- Alert: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlert,
- Dashboard: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboard,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskFile,
+ Alert: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlert,
+ Dashboard: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboard,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskFile,
Parameters: *map[string]*string,
- Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskQuery,
+ Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskQuery,
WarehouseId: *string,
}
```
@@ -9803,7 +9812,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlert {
AlertId: *string,
@@ -9865,7 +9874,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptions {
DestinationId: *string,
@@ -9911,7 +9920,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboard {
DashboardId: *string,
@@ -9987,7 +9996,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptions {
DestinationId: *string,
@@ -10033,7 +10042,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskFile {
Path: *string,
@@ -10065,7 +10074,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskSqlTaskQuery {
QueryId: *string,
@@ -10097,7 +10106,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotifications {
OnDurationWarningThresholdExceeded: interface{},
@@ -10179,7 +10188,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded {
Id: *string,
@@ -10214,7 +10223,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailure {
Id: *string,
@@ -10249,7 +10258,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStart {
Id: *string,
@@ -10284,7 +10293,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess {
Id: *string,
@@ -10319,10 +10328,10 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTrigger {
- FileArrival: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTriggerFileArrival,
+ FileArrival: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksJob.DataDatabricksJobJobSettingsSettingsTriggerFileArrival,
PauseStatus: *string,
}
```
@@ -10367,7 +10376,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsTriggerFileArrival {
Url: *string,
@@ -10427,7 +10436,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsWebhookNotifications {
OnDurationWarningThresholdExceeded: interface{},
@@ -10509,7 +10518,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded {
Id: *string,
@@ -10544,7 +10553,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailure {
Id: *string,
@@ -10579,7 +10588,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStart {
Id: *string,
@@ -10614,7 +10623,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
&datadatabricksjob.DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccess {
Id: *string,
@@ -10651,7 +10660,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsOutputReference
```
@@ -11058,7 +11067,7 @@ func InternalValue() DataDatabricksJobJobSettings
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsComputeList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsComputeList
```
@@ -11099,6 +11108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11106,6 +11116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -11201,7 +11227,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsComputeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsComputeOutputReference
```
@@ -11539,7 +11565,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsComputeSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsComputeSpecOutputReference
```
@@ -11817,7 +11843,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsComputeSpec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsContinuousOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsContinuousOutputReference
```
@@ -12095,7 +12121,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsContinuous
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsDbtTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsDbtTaskOutputReference
```
@@ -12511,7 +12537,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsDbtTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsDeploymentOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsDeploymentOutputReference
```
@@ -12811,7 +12837,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsDeployment
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsEmailNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsEmailNotificationsOutputReference
```
@@ -13205,7 +13231,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsEmailNotifications
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsGitSourceJobSourceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsGitSourceJobSourceOutputReference
```
@@ -13527,7 +13553,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsGitSourceJobSource
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsGitSourceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsGitSourceOutputReference
```
@@ -13956,7 +13982,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsGitSource
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsHealthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsHealthOutputReference
```
@@ -14240,7 +14266,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsHealth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsHealthRulesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsHealthRulesList
```
@@ -14281,6 +14307,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -14288,6 +14315,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -14383,7 +14426,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsHealthRulesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsHealthRulesOutputReference
```
@@ -14737,7 +14780,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterList
```
@@ -14778,6 +14821,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -14785,6 +14829,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -14880,7 +14940,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleOutputReference
```
@@ -15187,7 +15247,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAut
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesOutputReference
```
@@ -15668,7 +15728,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAws
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesOutputReference
```
@@ -16004,7 +16064,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzu
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsOutputReference
```
@@ -16275,7 +16335,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClu
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfOutputReference
```
@@ -16608,7 +16668,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClu
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3OutputReference
```
@@ -17053,7 +17113,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClu
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoList
```
@@ -17094,6 +17154,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -17101,6 +17162,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -17196,7 +17273,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -17496,7 +17573,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClu
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoOutputReference
```
@@ -17849,7 +17926,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthOutputReference
```
@@ -18142,7 +18219,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDoc
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageOutputReference
```
@@ -18455,7 +18532,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDoc
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesOutputReference
```
@@ -18878,7 +18955,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcp
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssOutputReference
```
@@ -19156,7 +19233,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsOutputReference
```
@@ -19427,7 +19504,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileOutputReference
```
@@ -19705,7 +19782,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsOutputReference
```
@@ -19983,7 +20060,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsList
```
@@ -20024,6 +20101,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -20031,6 +20109,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -20126,7 +20220,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsOutputReference
```
@@ -20687,7 +20781,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3OutputReference
```
@@ -21132,7 +21226,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesOutputReference
```
@@ -21410,7 +21504,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceOutputReference
```
@@ -21688,7 +21782,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterIni
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterOutputReference
```
@@ -22910,7 +23004,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsOutputReference
```
@@ -23217,7 +23311,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWor
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeOutputReference
```
@@ -23501,7 +23595,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWor
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsJobClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsJobClusterOutputReference
```
@@ -23839,7 +23933,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsLibraryCranOutputReference
```
@@ -24139,7 +24233,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsLibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsLibraryList
```
@@ -24180,6 +24274,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -24187,6 +24282,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -24282,7 +24393,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsLibraryMavenOutputReference
```
@@ -24611,7 +24722,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsLibraryOutputReference
```
@@ -25091,7 +25202,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsLibraryPypiOutputReference
```
@@ -25391,7 +25502,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsLibraryPypi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterAutoscaleOutputReference
```
@@ -25698,7 +25809,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesOutputReference
```
@@ -26179,7 +26290,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesOutputReference
```
@@ -26515,7 +26626,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterAzureAttribut
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsOutputReference
```
@@ -26786,7 +26897,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterClusterLogCon
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfOutputReference
```
@@ -27119,7 +27230,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterClusterLogCon
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3OutputReference
```
@@ -27564,7 +27675,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterClusterLogCon
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoList
```
@@ -27605,6 +27716,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -27612,6 +27724,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -27707,7 +27835,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -28007,7 +28135,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterClusterMountI
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoOutputReference
```
@@ -28360,7 +28488,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthOutputReference
```
@@ -28653,7 +28781,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBa
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterDockerImageOutputReference
```
@@ -28966,7 +29094,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesOutputReference
```
@@ -29389,7 +29517,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssOutputReference
```
@@ -29667,7 +29795,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAb
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsOutputReference
```
@@ -29938,7 +30066,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDb
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileOutputReference
```
@@ -30216,7 +30344,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsOutputReference
```
@@ -30494,7 +30622,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGc
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsList
```
@@ -30535,6 +30663,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -30542,6 +30671,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -30637,7 +30782,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsOutputReference
```
@@ -31198,7 +31343,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3OutputReference
```
@@ -31643,7 +31788,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesOutputReference
```
@@ -31921,7 +32066,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceOutputReference
```
@@ -32199,7 +32344,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterOutputReference
```
@@ -33421,7 +33566,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsOutputReference
```
@@ -33728,7 +33873,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeC
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeOutputReference
```
@@ -34012,7 +34157,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNewClusterWorkloadType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNotebookTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNotebookTaskOutputReference
```
@@ -34341,7 +34486,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNotebookTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsNotificationSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsNotificationSettingsOutputReference
```
@@ -34648,7 +34793,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsNotificationSettings
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsOutputReference
```
@@ -36266,7 +36411,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettings
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsParameterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsParameterList
```
@@ -36307,6 +36452,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -36314,6 +36460,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -36409,7 +36571,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsParameterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsParameterOutputReference
```
@@ -36734,7 +36896,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsPipelineTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsPipelineTaskOutputReference
```
@@ -37034,7 +37196,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsPipelineTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsPythonWheelTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsPythonWheelTaskOutputReference
```
@@ -37399,7 +37561,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsPythonWheelTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsQueueOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsQueueOutputReference
```
@@ -37670,7 +37832,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsQueue
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsRunAsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsRunAsOutputReference
```
@@ -37977,7 +38139,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsRunAs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsRunJobTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsRunJobTaskOutputReference
```
@@ -38277,7 +38439,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsRunJobTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsScheduleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsScheduleOutputReference
```
@@ -38599,7 +38761,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsSchedule
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsSparkJarTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsSparkJarTaskOutputReference
```
@@ -38935,7 +39097,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsSparkJarTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsSparkPythonTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsSparkPythonTaskOutputReference
```
@@ -39264,7 +39426,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsSparkPythonTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsSparkSubmitTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsSparkSubmitTaskOutputReference
```
@@ -39542,7 +39704,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsSparkSubmitTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskConditionTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskConditionTaskOutputReference
```
@@ -39878,7 +40040,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskConditionTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskDbtTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskDbtTaskOutputReference
```
@@ -40294,7 +40456,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskDbtTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskDependsOnList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskDependsOnList
```
@@ -40335,6 +40497,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -40342,6 +40505,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -40437,7 +40616,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskDependsOnOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskDependsOnOutputReference
```
@@ -40755,7 +40934,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskEmailNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskEmailNotificationsOutputReference
```
@@ -41120,7 +41299,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskEmailNotifications
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskHealthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskHealthOutputReference
```
@@ -41404,7 +41583,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskHealth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskHealthRulesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskHealthRulesList
```
@@ -41445,6 +41624,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -41452,6 +41632,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -41547,7 +41743,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskHealthRulesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskHealthRulesOutputReference
```
@@ -41901,7 +42097,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskLibraryCranOutputReference
```
@@ -42201,7 +42397,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskLibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskLibraryList
```
@@ -42242,6 +42438,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -42249,6 +42446,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -42344,7 +42557,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskLibraryMavenOutputReference
```
@@ -42673,7 +42886,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskLibraryOutputReference
```
@@ -43153,7 +43366,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskLibraryPypiOutputReference
```
@@ -43453,7 +43666,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskLibraryPypi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskList
```
@@ -43494,6 +43707,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -43501,6 +43715,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -43596,7 +43826,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleOutputReference
```
@@ -43903,7 +44133,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesOutputReference
```
@@ -44384,7 +44614,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttrib
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesOutputReference
```
@@ -44720,7 +44950,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttr
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsOutputReference
```
@@ -44991,7 +45221,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfOutputReference
```
@@ -45324,7 +45554,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3OutputReference
```
@@ -45769,7 +45999,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoList
```
@@ -45810,6 +46040,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -45817,6 +46048,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -45912,7 +46159,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -46212,7 +46459,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoOutputReference
```
@@ -46565,7 +46812,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthOutputReference
```
@@ -46858,7 +47105,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerIma
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageOutputReference
```
@@ -47171,7 +47418,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerIma
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesOutputReference
```
@@ -47594,7 +47841,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttrib
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssOutputReference
```
@@ -47872,7 +48119,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsOutputReference
```
@@ -48143,7 +48390,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileOutputReference
```
@@ -48421,7 +48668,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsOutputReference
```
@@ -48699,7 +48946,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsList
```
@@ -48740,6 +48987,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -48747,6 +48995,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -48842,7 +49106,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsOutputReference
```
@@ -49403,7 +49667,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3OutputReference
```
@@ -49848,7 +50112,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesOutputReference
```
@@ -50126,7 +50390,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceOutputReference
```
@@ -50404,7 +50668,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScrip
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterOutputReference
```
@@ -51626,7 +51890,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsOutputReference
```
@@ -51933,7 +52197,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadT
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeOutputReference
```
@@ -52217,7 +52481,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadT
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNotebookTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNotebookTaskOutputReference
```
@@ -52546,7 +52810,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNotebookTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskNotificationSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskNotificationSettingsOutputReference
```
@@ -52882,7 +53146,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskNotificationSetting
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskOutputReference
```
@@ -54153,7 +54417,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskPipelineTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskPipelineTaskOutputReference
```
@@ -54453,7 +54717,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskPipelineTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskOutputReference
```
@@ -54818,7 +55082,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskPythonWheelTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskRunJobTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskRunJobTaskOutputReference
```
@@ -55118,7 +55382,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskRunJobTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSparkJarTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSparkJarTaskOutputReference
```
@@ -55454,7 +55718,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSparkJarTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskOutputReference
```
@@ -55783,7 +56047,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSparkPythonTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskOutputReference
```
@@ -56061,7 +56325,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertOutputReference
```
@@ -56396,7 +56660,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlert
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsList
```
@@ -56437,6 +56701,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -56444,6 +56709,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -56539,7 +56820,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsOutputReference
```
@@ -56864,7 +57145,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardOutputReference
```
@@ -57235,7 +57516,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboard
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsList
```
@@ -57276,6 +57557,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -57283,6 +57565,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -57378,7 +57676,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsOutputReference
```
@@ -57703,7 +58001,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSqlTaskFileOutputReference
```
@@ -57974,7 +58272,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSqlTaskFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSqlTaskOutputReference
```
@@ -58449,7 +58747,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSqlTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryOutputReference
```
@@ -58720,7 +59018,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskSqlTaskQuery
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededList
```
@@ -58761,6 +59059,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58768,6 +59067,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -58863,7 +59178,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference
```
@@ -59159,7 +59474,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureList
```
@@ -59200,6 +59515,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59207,6 +59523,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -59302,7 +59634,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureOutputReference
```
@@ -59598,7 +59930,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartList
```
@@ -59639,6 +59971,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59646,6 +59979,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -59741,7 +60090,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartOutputReference
```
@@ -60037,7 +60386,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessList
```
@@ -60078,6 +60427,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -60085,6 +60435,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -60180,7 +60546,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessOutputReference
```
@@ -60476,7 +60842,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOutputReference
```
@@ -60893,7 +61259,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTaskWebhookNotification
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTriggerFileArrivalOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTriggerFileArrivalOutputReference
```
@@ -61222,7 +61588,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTriggerFileArrival
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsTriggerOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsTriggerOutputReference
```
@@ -61535,7 +61901,7 @@ func InternalValue() DataDatabricksJobJobSettingsSettingsTrigger
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededList
```
@@ -61576,6 +61942,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61583,6 +61950,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -61678,7 +62061,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededOutputReference
```
@@ -61974,7 +62357,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureList
```
@@ -62015,6 +62398,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62022,6 +62406,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -62117,7 +62517,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureOutputReference
```
@@ -62413,7 +62813,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartList
```
@@ -62454,6 +62854,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62461,6 +62862,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -62556,7 +62973,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartOutputReference
```
@@ -62852,7 +63269,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessList
```
@@ -62893,6 +63310,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62900,6 +63318,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -62995,7 +63429,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessOutputReference
```
@@ -63291,7 +63725,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjob"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjob"
datadatabricksjob.NewDataDatabricksJobJobSettingsSettingsWebhookNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksJobJobSettingsSettingsWebhookNotificationsOutputReference
```
diff --git a/docs/dataDatabricksJob.java.md b/docs/dataDatabricksJob.java.md
index 37a045dd0..117e6633f 100644
--- a/docs/dataDatabricksJob.java.md
+++ b/docs/dataDatabricksJob.java.md
@@ -163,6 +163,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -234,6 +235,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -11274,6 +11283,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11281,6 +11291,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -14456,6 +14482,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -14463,6 +14490,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -14953,6 +14996,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -14960,6 +15004,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -17269,6 +17329,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -17276,6 +17337,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -20199,6 +20276,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -20206,6 +20284,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -24355,6 +24449,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -24362,6 +24457,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -27780,6 +27891,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -27787,6 +27899,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -30710,6 +30838,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -30717,6 +30846,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -36482,6 +36627,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -36489,6 +36635,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -40510,6 +40672,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -40517,6 +40680,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -41620,6 +41799,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -41627,6 +41807,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -42417,6 +42613,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -42424,6 +42621,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -43669,6 +43882,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -43676,6 +43890,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -45985,6 +46215,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -45992,6 +46223,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -48915,6 +49162,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -48922,6 +49170,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -56612,6 +56876,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -56619,6 +56884,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -57451,6 +57732,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -57458,6 +57740,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -58936,6 +59234,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58943,6 +59242,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -59375,6 +59690,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -59382,6 +59698,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -59814,6 +60146,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -59821,6 +60154,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -60253,6 +60602,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -60260,6 +60610,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -61751,6 +62117,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61758,6 +62125,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -62190,6 +62573,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -62197,6 +62581,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -62629,6 +63029,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -62636,6 +63037,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -63068,6 +63485,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -63075,6 +63493,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksJob.python.md b/docs/dataDatabricksJob.python.md
index b5cb3d81b..a1642415b 100644
--- a/docs/dataDatabricksJob.python.md
+++ b/docs/dataDatabricksJob.python.md
@@ -161,6 +161,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -237,6 +238,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -11682,6 +11691,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -11689,6 +11699,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -15127,6 +15155,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -15134,6 +15163,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -15659,6 +15706,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -15666,6 +15714,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -18195,6 +18261,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -18202,6 +18269,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -21386,6 +21471,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -21393,6 +21479,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -26350,6 +26454,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -26357,6 +26462,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -30120,6 +30243,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -30127,6 +30251,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -33311,6 +33453,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -33318,6 +33461,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -40320,6 +40481,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -40327,6 +40489,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -44658,6 +44838,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -44665,6 +44846,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -45855,6 +46054,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -45862,6 +46062,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -46712,6 +46930,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -46719,6 +46938,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -48097,6 +48334,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -48104,6 +48342,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -50633,6 +50889,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -50640,6 +50897,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -53824,6 +54099,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -53831,6 +54107,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -62888,6 +63182,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -62895,6 +63190,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -63789,6 +64102,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -63796,6 +64110,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -65449,6 +65781,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -65456,6 +65789,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -65923,6 +66274,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -65930,6 +66282,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -66397,6 +66767,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -66404,6 +66775,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -66871,6 +67260,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -66878,6 +67268,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -68509,6 +68917,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -68516,6 +68925,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -68983,6 +69410,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -68990,6 +69418,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -69457,6 +69903,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -69464,6 +69911,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -69931,6 +70396,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -69938,6 +70404,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksJob.typescript.md b/docs/dataDatabricksJob.typescript.md
index 524e6a3d2..673214169 100644
--- a/docs/dataDatabricksJob.typescript.md
+++ b/docs/dataDatabricksJob.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -10446,6 +10455,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -10453,6 +10463,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -13628,6 +13654,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -13635,6 +13662,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -14125,6 +14168,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -14132,6 +14176,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -16441,6 +16501,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -16448,6 +16509,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -19371,6 +19448,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -19378,6 +19456,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -23527,6 +23621,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -23534,6 +23629,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -26952,6 +27063,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -26959,6 +27071,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -29882,6 +30010,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -29889,6 +30018,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -35654,6 +35799,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -35661,6 +35807,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -39682,6 +39844,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -39689,6 +39852,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -40792,6 +40971,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -40799,6 +40979,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -41589,6 +41785,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -41596,6 +41793,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -42841,6 +43054,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -42848,6 +43062,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -45157,6 +45387,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -45164,6 +45395,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -48087,6 +48334,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -48094,6 +48342,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -55784,6 +56048,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -55791,6 +56056,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -56623,6 +56904,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -56630,6 +56912,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -58108,6 +58406,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58115,6 +58414,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -58547,6 +58862,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58554,6 +58870,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -58986,6 +59318,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58993,6 +59326,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -59425,6 +59774,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -59432,6 +59782,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -60923,6 +61289,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -60930,6 +61297,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -61362,6 +61745,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61369,6 +61753,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -61801,6 +62201,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61808,6 +62209,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -62240,6 +62657,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -62247,6 +62665,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksJobs.csharp.md b/docs/dataDatabricksJobs.csharp.md
index 7518b3383..406516bc9 100644
--- a/docs/dataDatabricksJobs.csharp.md
+++ b/docs/dataDatabricksJobs.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksJobs.go.md b/docs/dataDatabricksJobs.go.md
index 8a7bc49de..9c0b82827 100644
--- a/docs/dataDatabricksJobs.go.md
+++ b/docs/dataDatabricksJobs.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
datadatabricksjobs.NewDataDatabricksJobs(scope Construct, id *string, config DataDatabricksJobsConfig) DataDatabricksJobs
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
datadatabricksjobs.DataDatabricksJobs_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
datadatabricksjobs.DataDatabricksJobs_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksjobs.DataDatabricksJobs_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
datadatabricksjobs.DataDatabricksJobs_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksjobs.DataDatabricksJobs_IsTerraformDataSource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
datadatabricksjobs.DataDatabricksJobs_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksjobs"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksjobs"
&datadatabricksjobs.DataDatabricksJobsConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksJobs.java.md b/docs/dataDatabricksJobs.java.md
index acaf7ca8f..9c609f0dd 100644
--- a/docs/dataDatabricksJobs.java.md
+++ b/docs/dataDatabricksJobs.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksJobs.python.md b/docs/dataDatabricksJobs.python.md
index 8a2ad2b43..1dd8cade6 100644
--- a/docs/dataDatabricksJobs.python.md
+++ b/docs/dataDatabricksJobs.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksJobs.typescript.md b/docs/dataDatabricksJobs.typescript.md
index d11825aed..071a2dccb 100644
--- a/docs/dataDatabricksJobs.typescript.md
+++ b/docs/dataDatabricksJobs.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksMetastore.csharp.md b/docs/dataDatabricksMetastore.csharp.md
index fdb4c9f67..91328c4b1 100644
--- a/docs/dataDatabricksMetastore.csharp.md
+++ b/docs/dataDatabricksMetastore.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksMetastore.go.md b/docs/dataDatabricksMetastore.go.md
index 3c643ec15..ccf5597e5 100644
--- a/docs/dataDatabricksMetastore.go.md
+++ b/docs/dataDatabricksMetastore.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.NewDataDatabricksMetastore(scope Construct, id *string, config DataDatabricksMetastoreConfig) DataDatabricksMetastore
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -294,7 +303,7 @@ func ResetMetastoreInfo()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.DataDatabricksMetastore_IsConstruct(x interface{}) *bool
```
@@ -326,7 +335,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.DataDatabricksMetastore_IsTerraformElement(x interface{}) *bool
```
@@ -340,7 +349,7 @@ datadatabricksmetastore.DataDatabricksMetastore_IsTerraformElement(x interface{}
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.DataDatabricksMetastore_IsTerraformDataSource(x interface{}) *bool
```
@@ -354,7 +363,7 @@ datadatabricksmetastore.DataDatabricksMetastore_IsTerraformDataSource(x interfac
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.DataDatabricksMetastore_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -627,7 +636,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
&datadatabricksmetastore.DataDatabricksMetastoreConfig {
Connection: interface{},
@@ -639,7 +648,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
Provisioners: *[]interface{},
MetastoreId: *string,
Id: *string,
- MetastoreInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksMetastore.DataDatabricksMetastoreMetastoreInfo,
+ MetastoreInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksMetastore.DataDatabricksMetastoreMetastoreInfo,
}
```
@@ -776,7 +785,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
&datadatabricksmetastore.DataDatabricksMetastoreMetastoreInfo {
Cloud: *string,
@@ -1048,7 +1057,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastore"
datadatabricksmetastore.NewDataDatabricksMetastoreMetastoreInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksMetastoreMetastoreInfoOutputReference
```
diff --git a/docs/dataDatabricksMetastore.java.md b/docs/dataDatabricksMetastore.java.md
index a451c681d..927c6103c 100644
--- a/docs/dataDatabricksMetastore.java.md
+++ b/docs/dataDatabricksMetastore.java.md
@@ -143,6 +143,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -211,6 +212,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksMetastore.python.md b/docs/dataDatabricksMetastore.python.md
index 0f519b9a0..53d094554 100644
--- a/docs/dataDatabricksMetastore.python.md
+++ b/docs/dataDatabricksMetastore.python.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -214,6 +215,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksMetastore.typescript.md b/docs/dataDatabricksMetastore.typescript.md
index ebe5545a4..cc8b47df1 100644
--- a/docs/dataDatabricksMetastore.typescript.md
+++ b/docs/dataDatabricksMetastore.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksMetastores.csharp.md b/docs/dataDatabricksMetastores.csharp.md
index 821e11265..4bed395d2 100644
--- a/docs/dataDatabricksMetastores.csharp.md
+++ b/docs/dataDatabricksMetastores.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksMetastores.go.md b/docs/dataDatabricksMetastores.go.md
index 67c31881f..3ce2c3332 100644
--- a/docs/dataDatabricksMetastores.go.md
+++ b/docs/dataDatabricksMetastores.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
datadatabricksmetastores.NewDataDatabricksMetastores(scope Construct, id *string, config DataDatabricksMetastoresConfig) DataDatabricksMetastores
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
datadatabricksmetastores.DataDatabricksMetastores_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
datadatabricksmetastores.DataDatabricksMetastores_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksmetastores.DataDatabricksMetastores_IsTerraformElement(x interface
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
datadatabricksmetastores.DataDatabricksMetastores_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksmetastores.DataDatabricksMetastores_IsTerraformDataSource(x interf
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
datadatabricksmetastores.DataDatabricksMetastores_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmetastores"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmetastores"
&datadatabricksmetastores.DataDatabricksMetastoresConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksMetastores.java.md b/docs/dataDatabricksMetastores.java.md
index 7da3d630f..094bca6a2 100644
--- a/docs/dataDatabricksMetastores.java.md
+++ b/docs/dataDatabricksMetastores.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksMetastores.python.md b/docs/dataDatabricksMetastores.python.md
index 069f6052d..b7a74ccaa 100644
--- a/docs/dataDatabricksMetastores.python.md
+++ b/docs/dataDatabricksMetastores.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksMetastores.typescript.md b/docs/dataDatabricksMetastores.typescript.md
index c6259501d..76ff2d2c5 100644
--- a/docs/dataDatabricksMetastores.typescript.md
+++ b/docs/dataDatabricksMetastores.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksMlflowModel.csharp.md b/docs/dataDatabricksMlflowModel.csharp.md
index fb27f10ed..9078236b1 100644
--- a/docs/dataDatabricksMlflowModel.csharp.md
+++ b/docs/dataDatabricksMlflowModel.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -126,6 +127,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -1265,6 +1274,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1272,6 +1282,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -2065,6 +2091,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2072,6 +2099,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -2533,6 +2576,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2540,6 +2584,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksMlflowModel.go.md b/docs/dataDatabricksMlflowModel.go.md
index ff66cc2f7..ac00fd78a 100644
--- a/docs/dataDatabricksMlflowModel.go.md
+++ b/docs/dataDatabricksMlflowModel.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModel(scope Construct, id *string, config DataDatabricksMlflowModelConfig) DataDatabricksMlflowModel
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -126,6 +127,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -328,7 +337,7 @@ func ResetUserId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.DataDatabricksMlflowModel_IsConstruct(x interface{}) *bool
```
@@ -360,7 +369,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.DataDatabricksMlflowModel_IsTerraformElement(x interface{}) *bool
```
@@ -374,7 +383,7 @@ datadatabricksmlflowmodel.DataDatabricksMlflowModel_IsTerraformElement(x interfa
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.DataDatabricksMlflowModel_IsTerraformDataSource(x interface{}) *bool
```
@@ -388,7 +397,7 @@ datadatabricksmlflowmodel.DataDatabricksMlflowModel_IsTerraformDataSource(x inte
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.DataDatabricksMlflowModel_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -738,7 +747,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
&datadatabricksmlflowmodel.DataDatabricksMlflowModelConfig {
Connection: interface{},
@@ -928,7 +937,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
&datadatabricksmlflowmodel.DataDatabricksMlflowModelLatestVersions {
CreationTimestamp: *f64,
@@ -1130,7 +1139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
&datadatabricksmlflowmodel.DataDatabricksMlflowModelLatestVersionsTags {
Key: *string,
@@ -1176,7 +1185,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
&datadatabricksmlflowmodel.DataDatabricksMlflowModelTags {
Key: *string,
@@ -1224,7 +1233,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelLatestVersionsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksMlflowModelLatestVersionsList
```
@@ -1265,6 +1274,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1272,6 +1282,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1367,7 +1393,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelLatestVersionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksMlflowModelLatestVersionsOutputReference
```
@@ -2024,7 +2050,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelLatestVersionsTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksMlflowModelLatestVersionsTagsList
```
@@ -2065,6 +2091,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2072,6 +2099,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2167,7 +2210,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelLatestVersionsTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksMlflowModelLatestVersionsTagsOutputReference
```
@@ -2492,7 +2535,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksMlflowModelTagsList
```
@@ -2533,6 +2576,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2540,6 +2584,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2635,7 +2695,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmlflowmodel"
datadatabricksmlflowmodel.NewDataDatabricksMlflowModelTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksMlflowModelTagsOutputReference
```
diff --git a/docs/dataDatabricksMlflowModel.java.md b/docs/dataDatabricksMlflowModel.java.md
index aca6d9dbc..8d4288d1d 100644
--- a/docs/dataDatabricksMlflowModel.java.md
+++ b/docs/dataDatabricksMlflowModel.java.md
@@ -174,6 +174,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -246,6 +247,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -1392,6 +1401,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1399,6 +1409,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2192,6 +2218,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2199,6 +2226,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2660,6 +2703,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2667,6 +2711,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksMlflowModel.python.md b/docs/dataDatabricksMlflowModel.python.md
index 35c764be7..997b6bae2 100644
--- a/docs/dataDatabricksMlflowModel.python.md
+++ b/docs/dataDatabricksMlflowModel.python.md
@@ -170,6 +170,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -247,6 +248,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -1425,6 +1434,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1432,6 +1442,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2262,6 +2290,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2269,6 +2298,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2765,6 +2812,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2772,6 +2820,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksMlflowModel.typescript.md b/docs/dataDatabricksMlflowModel.typescript.md
index d5a20e63d..7d6194b96 100644
--- a/docs/dataDatabricksMlflowModel.typescript.md
+++ b/docs/dataDatabricksMlflowModel.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -126,6 +127,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -1231,6 +1240,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1238,6 +1248,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -2031,6 +2057,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2038,6 +2065,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -2499,6 +2542,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2506,6 +2550,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksMwsCredentials.csharp.md b/docs/dataDatabricksMwsCredentials.csharp.md
index cd2a91edc..413f1169b 100644
--- a/docs/dataDatabricksMwsCredentials.csharp.md
+++ b/docs/dataDatabricksMwsCredentials.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksMwsCredentials.go.md b/docs/dataDatabricksMwsCredentials.go.md
index 7092a4a72..ae78be7c4 100644
--- a/docs/dataDatabricksMwsCredentials.go.md
+++ b/docs/dataDatabricksMwsCredentials.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
datadatabricksmwscredentials.NewDataDatabricksMwsCredentials(scope Construct, id *string, config DataDatabricksMwsCredentialsConfig) DataDatabricksMwsCredentials
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
datadatabricksmwscredentials.DataDatabricksMwsCredentials_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
datadatabricksmwscredentials.DataDatabricksMwsCredentials_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksmwscredentials.DataDatabricksMwsCredentials_IsTerraformElement(x i
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
datadatabricksmwscredentials.DataDatabricksMwsCredentials_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksmwscredentials.DataDatabricksMwsCredentials_IsTerraformDataSource(
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
datadatabricksmwscredentials.DataDatabricksMwsCredentials_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwscredentials"
&datadatabricksmwscredentials.DataDatabricksMwsCredentialsConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksMwsCredentials.java.md b/docs/dataDatabricksMwsCredentials.java.md
index 8c749d9b0..722d4c595 100644
--- a/docs/dataDatabricksMwsCredentials.java.md
+++ b/docs/dataDatabricksMwsCredentials.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksMwsCredentials.python.md b/docs/dataDatabricksMwsCredentials.python.md
index f6299c5bc..0e3e85c01 100644
--- a/docs/dataDatabricksMwsCredentials.python.md
+++ b/docs/dataDatabricksMwsCredentials.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksMwsCredentials.typescript.md b/docs/dataDatabricksMwsCredentials.typescript.md
index 746e8e98e..257946eb4 100644
--- a/docs/dataDatabricksMwsCredentials.typescript.md
+++ b/docs/dataDatabricksMwsCredentials.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksMwsWorkspaces.csharp.md b/docs/dataDatabricksMwsWorkspaces.csharp.md
index 50242f8ed..2c4c7c7ba 100644
--- a/docs/dataDatabricksMwsWorkspaces.csharp.md
+++ b/docs/dataDatabricksMwsWorkspaces.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksMwsWorkspaces.go.md b/docs/dataDatabricksMwsWorkspaces.go.md
index 592582f35..facb07915 100644
--- a/docs/dataDatabricksMwsWorkspaces.go.md
+++ b/docs/dataDatabricksMwsWorkspaces.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
datadatabricksmwsworkspaces.NewDataDatabricksMwsWorkspaces(scope Construct, id *string, config DataDatabricksMwsWorkspacesConfig) DataDatabricksMwsWorkspaces
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
datadatabricksmwsworkspaces.DataDatabricksMwsWorkspaces_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksmwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksmwsworkspaces"
&datadatabricksmwsworkspaces.DataDatabricksMwsWorkspacesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksMwsWorkspaces.java.md b/docs/dataDatabricksMwsWorkspaces.java.md
index 83c755483..7fae3244d 100644
--- a/docs/dataDatabricksMwsWorkspaces.java.md
+++ b/docs/dataDatabricksMwsWorkspaces.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksMwsWorkspaces.python.md b/docs/dataDatabricksMwsWorkspaces.python.md
index 59caee4f5..8d8bcfbe7 100644
--- a/docs/dataDatabricksMwsWorkspaces.python.md
+++ b/docs/dataDatabricksMwsWorkspaces.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksMwsWorkspaces.typescript.md b/docs/dataDatabricksMwsWorkspaces.typescript.md
index 418391994..b1ebcc4fe 100644
--- a/docs/dataDatabricksMwsWorkspaces.typescript.md
+++ b/docs/dataDatabricksMwsWorkspaces.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksNodeType.csharp.md b/docs/dataDatabricksNodeType.csharp.md
index bc33721ab..e95011b2e 100644
--- a/docs/dataDatabricksNodeType.csharp.md
+++ b/docs/dataDatabricksNodeType.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksNodeType.go.md b/docs/dataDatabricksNodeType.go.md
index 7fae7c5a4..97aee9fcc 100644
--- a/docs/dataDatabricksNodeType.go.md
+++ b/docs/dataDatabricksNodeType.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
datadatabricksnodetype.NewDataDatabricksNodeType(scope Construct, id *string, config DataDatabricksNodeTypeConfig) DataDatabricksNodeType
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -365,7 +374,7 @@ func ResetSupportPortForwarding()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
datadatabricksnodetype.DataDatabricksNodeType_IsConstruct(x interface{}) *bool
```
@@ -397,7 +406,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
datadatabricksnodetype.DataDatabricksNodeType_IsTerraformElement(x interface{}) *bool
```
@@ -411,7 +420,7 @@ datadatabricksnodetype.DataDatabricksNodeType_IsTerraformElement(x interface{})
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
datadatabricksnodetype.DataDatabricksNodeType_IsTerraformDataSource(x interface{}) *bool
```
@@ -425,7 +434,7 @@ datadatabricksnodetype.DataDatabricksNodeType_IsTerraformDataSource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
datadatabricksnodetype.DataDatabricksNodeType_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -940,7 +949,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnodetype"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnodetype"
&datadatabricksnodetype.DataDatabricksNodeTypeConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksNodeType.java.md b/docs/dataDatabricksNodeType.java.md
index 28f1ff883..692c45a91 100644
--- a/docs/dataDatabricksNodeType.java.md
+++ b/docs/dataDatabricksNodeType.java.md
@@ -258,6 +258,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -337,6 +338,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksNodeType.python.md b/docs/dataDatabricksNodeType.python.md
index 09ba8cb54..9c866ff85 100644
--- a/docs/dataDatabricksNodeType.python.md
+++ b/docs/dataDatabricksNodeType.python.md
@@ -249,6 +249,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -333,6 +334,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksNodeType.typescript.md b/docs/dataDatabricksNodeType.typescript.md
index 17c5c7918..9215b24af 100644
--- a/docs/dataDatabricksNodeType.typescript.md
+++ b/docs/dataDatabricksNodeType.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -133,6 +134,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksNotebook.csharp.md b/docs/dataDatabricksNotebook.csharp.md
index 3bac288d6..9b972ea98 100644
--- a/docs/dataDatabricksNotebook.csharp.md
+++ b/docs/dataDatabricksNotebook.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksNotebook.go.md b/docs/dataDatabricksNotebook.go.md
index 6ecd3659d..f0a90e6ae 100644
--- a/docs/dataDatabricksNotebook.go.md
+++ b/docs/dataDatabricksNotebook.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
datadatabricksnotebook.NewDataDatabricksNotebook(scope Construct, id *string, config DataDatabricksNotebookConfig) DataDatabricksNotebook
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -295,7 +304,7 @@ func ResetObjectType()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
datadatabricksnotebook.DataDatabricksNotebook_IsConstruct(x interface{}) *bool
```
@@ -327,7 +336,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
datadatabricksnotebook.DataDatabricksNotebook_IsTerraformElement(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksnotebook.DataDatabricksNotebook_IsTerraformElement(x interface{})
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
datadatabricksnotebook.DataDatabricksNotebook_IsTerraformDataSource(x interface{}) *bool
```
@@ -355,7 +364,7 @@ datadatabricksnotebook.DataDatabricksNotebook_IsTerraformDataSource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
datadatabricksnotebook.DataDatabricksNotebook_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -705,7 +714,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebook"
&datadatabricksnotebook.DataDatabricksNotebookConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksNotebook.java.md b/docs/dataDatabricksNotebook.java.md
index a9a3cb0fa..ee4c2d19b 100644
--- a/docs/dataDatabricksNotebook.java.md
+++ b/docs/dataDatabricksNotebook.java.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -240,6 +241,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksNotebook.python.md b/docs/dataDatabricksNotebook.python.md
index b6b1a94a6..f81b10412 100644
--- a/docs/dataDatabricksNotebook.python.md
+++ b/docs/dataDatabricksNotebook.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -243,6 +244,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksNotebook.typescript.md b/docs/dataDatabricksNotebook.typescript.md
index 5d3cdaff4..bc5498c39 100644
--- a/docs/dataDatabricksNotebook.typescript.md
+++ b/docs/dataDatabricksNotebook.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -123,6 +124,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksNotebookPaths.csharp.md b/docs/dataDatabricksNotebookPaths.csharp.md
index 6f16c3d6b..fff7ad001 100644
--- a/docs/dataDatabricksNotebookPaths.csharp.md
+++ b/docs/dataDatabricksNotebookPaths.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -821,6 +830,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -828,6 +838,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksNotebookPaths.go.md b/docs/dataDatabricksNotebookPaths.go.md
index 37c04134a..2a13cf4ea 100644
--- a/docs/dataDatabricksNotebookPaths.go.md
+++ b/docs/dataDatabricksNotebookPaths.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.NewDataDatabricksNotebookPaths(scope Construct, id *string, config DataDatabricksNotebookPathsConfig) DataDatabricksNotebookPaths
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -274,7 +283,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.DataDatabricksNotebookPaths_IsConstruct(x interface{}) *bool
```
@@ -306,7 +315,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.DataDatabricksNotebookPaths_IsTerraformElement(x interface{}) *bool
```
@@ -320,7 +329,7 @@ datadatabricksnotebookpaths.DataDatabricksNotebookPaths_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.DataDatabricksNotebookPaths_IsTerraformDataSource(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksnotebookpaths.DataDatabricksNotebookPaths_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.DataDatabricksNotebookPaths_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -618,7 +627,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
&datadatabricksnotebookpaths.DataDatabricksNotebookPathsConfig {
Connection: interface{},
@@ -765,7 +774,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
&datadatabricksnotebookpaths.DataDatabricksNotebookPathsNotebookPathListStruct {
@@ -780,7 +789,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.NewDataDatabricksNotebookPathsNotebookPathListStructList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksNotebookPathsNotebookPathListStructList
```
@@ -821,6 +830,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -828,6 +838,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -912,7 +938,7 @@ func Fqn() *string
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksnotebookpaths"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksnotebookpaths"
datadatabricksnotebookpaths.NewDataDatabricksNotebookPathsNotebookPathListStructOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksNotebookPathsNotebookPathListStructOutputReference
```
diff --git a/docs/dataDatabricksNotebookPaths.java.md b/docs/dataDatabricksNotebookPaths.java.md
index dcf586227..ff9b0fff0 100644
--- a/docs/dataDatabricksNotebookPaths.java.md
+++ b/docs/dataDatabricksNotebookPaths.java.md
@@ -142,6 +142,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -208,6 +209,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -913,6 +922,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -920,6 +930,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksNotebookPaths.python.md b/docs/dataDatabricksNotebookPaths.python.md
index 0a00e1570..22278e58b 100644
--- a/docs/dataDatabricksNotebookPaths.python.md
+++ b/docs/dataDatabricksNotebookPaths.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -210,6 +211,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -944,6 +953,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -951,6 +961,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksNotebookPaths.typescript.md b/docs/dataDatabricksNotebookPaths.typescript.md
index 6b6e7568c..399e27e3e 100644
--- a/docs/dataDatabricksNotebookPaths.typescript.md
+++ b/docs/dataDatabricksNotebookPaths.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -808,6 +817,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -815,6 +825,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksPipelines.csharp.md b/docs/dataDatabricksPipelines.csharp.md
index 9790448c8..7c4aea2c5 100644
--- a/docs/dataDatabricksPipelines.csharp.md
+++ b/docs/dataDatabricksPipelines.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksPipelines.go.md b/docs/dataDatabricksPipelines.go.md
index af60a7857..88f08b741 100644
--- a/docs/dataDatabricksPipelines.go.md
+++ b/docs/dataDatabricksPipelines.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
datadatabrickspipelines.NewDataDatabricksPipelines(scope Construct, id *string, config DataDatabricksPipelinesConfig) DataDatabricksPipelines
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -288,7 +297,7 @@ func ResetPipelineName()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
datadatabrickspipelines.DataDatabricksPipelines_IsConstruct(x interface{}) *bool
```
@@ -320,7 +329,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
datadatabrickspipelines.DataDatabricksPipelines_IsTerraformElement(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabrickspipelines.DataDatabricksPipelines_IsTerraformElement(x interface{}
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
datadatabrickspipelines.DataDatabricksPipelines_IsTerraformDataSource(x interface{}) *bool
```
@@ -348,7 +357,7 @@ datadatabrickspipelines.DataDatabricksPipelines_IsTerraformDataSource(x interfac
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
datadatabrickspipelines.DataDatabricksPipelines_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -621,7 +630,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickspipelines"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickspipelines"
&datadatabrickspipelines.DataDatabricksPipelinesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksPipelines.java.md b/docs/dataDatabricksPipelines.java.md
index 09f793660..271cba108 100644
--- a/docs/dataDatabricksPipelines.java.md
+++ b/docs/dataDatabricksPipelines.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -209,6 +210,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksPipelines.python.md b/docs/dataDatabricksPipelines.python.md
index 95db75c6d..068a0f58f 100644
--- a/docs/dataDatabricksPipelines.python.md
+++ b/docs/dataDatabricksPipelines.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -212,6 +213,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksPipelines.typescript.md b/docs/dataDatabricksPipelines.typescript.md
index b6cb82d34..9c456799e 100644
--- a/docs/dataDatabricksPipelines.typescript.md
+++ b/docs/dataDatabricksPipelines.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksSchemas.csharp.md b/docs/dataDatabricksSchemas.csharp.md
index 1a6176db2..daaf15aae 100644
--- a/docs/dataDatabricksSchemas.csharp.md
+++ b/docs/dataDatabricksSchemas.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksSchemas.go.md b/docs/dataDatabricksSchemas.go.md
index ff458901b..e9e1f81b7 100644
--- a/docs/dataDatabricksSchemas.go.md
+++ b/docs/dataDatabricksSchemas.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
datadatabricksschemas.NewDataDatabricksSchemas(scope Construct, id *string, config DataDatabricksSchemasConfig) DataDatabricksSchemas
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
datadatabricksschemas.DataDatabricksSchemas_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
datadatabricksschemas.DataDatabricksSchemas_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksschemas.DataDatabricksSchemas_IsTerraformElement(x interface{}) *b
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
datadatabricksschemas.DataDatabricksSchemas_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksschemas.DataDatabricksSchemas_IsTerraformDataSource(x interface{})
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
datadatabricksschemas.DataDatabricksSchemas_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -614,7 +623,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksschemas"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksschemas"
&datadatabricksschemas.DataDatabricksSchemasConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksSchemas.java.md b/docs/dataDatabricksSchemas.java.md
index 921a89d99..049c25441 100644
--- a/docs/dataDatabricksSchemas.java.md
+++ b/docs/dataDatabricksSchemas.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -208,6 +209,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksSchemas.python.md b/docs/dataDatabricksSchemas.python.md
index 73307f46b..2d11b3552 100644
--- a/docs/dataDatabricksSchemas.python.md
+++ b/docs/dataDatabricksSchemas.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -211,6 +212,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksSchemas.typescript.md b/docs/dataDatabricksSchemas.typescript.md
index a8839553d..0875d0c8c 100644
--- a/docs/dataDatabricksSchemas.typescript.md
+++ b/docs/dataDatabricksSchemas.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksServicePrincipal.csharp.md b/docs/dataDatabricksServicePrincipal.csharp.md
index 09c1027b1..b104b560b 100644
--- a/docs/dataDatabricksServicePrincipal.csharp.md
+++ b/docs/dataDatabricksServicePrincipal.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -128,6 +129,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksServicePrincipal.go.md b/docs/dataDatabricksServicePrincipal.go.md
index 98c83167b..e4514793d 100644
--- a/docs/dataDatabricksServicePrincipal.go.md
+++ b/docs/dataDatabricksServicePrincipal.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
datadatabricksserviceprincipal.NewDataDatabricksServicePrincipal(scope Construct, id *string, config DataDatabricksServicePrincipalConfig) DataDatabricksServicePrincipal
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -128,6 +129,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -330,7 +339,7 @@ func ResetSpId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
datadatabricksserviceprincipal.DataDatabricksServicePrincipal_IsConstruct(x interface{}) *bool
```
@@ -362,7 +371,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
datadatabricksserviceprincipal.DataDatabricksServicePrincipal_IsTerraformElement(x interface{}) *bool
```
@@ -376,7 +385,7 @@ datadatabricksserviceprincipal.DataDatabricksServicePrincipal_IsTerraformElement
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
datadatabricksserviceprincipal.DataDatabricksServicePrincipal_IsTerraformDataSource(x interface{}) *bool
```
@@ -390,7 +399,7 @@ datadatabricksserviceprincipal.DataDatabricksServicePrincipal_IsTerraformDataSou
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
datadatabricksserviceprincipal.DataDatabricksServicePrincipal_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -795,7 +804,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipal"
&datadatabricksserviceprincipal.DataDatabricksServicePrincipalConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksServicePrincipal.java.md b/docs/dataDatabricksServicePrincipal.java.md
index fff8979c9..bfeb26993 100644
--- a/docs/dataDatabricksServicePrincipal.java.md
+++ b/docs/dataDatabricksServicePrincipal.java.md
@@ -202,6 +202,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -276,6 +277,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksServicePrincipal.python.md b/docs/dataDatabricksServicePrincipal.python.md
index d1875a543..7a27fcefd 100644
--- a/docs/dataDatabricksServicePrincipal.python.md
+++ b/docs/dataDatabricksServicePrincipal.python.md
@@ -199,6 +199,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -278,6 +279,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksServicePrincipal.typescript.md b/docs/dataDatabricksServicePrincipal.typescript.md
index 6e7c022d0..9aa8f2869 100644
--- a/docs/dataDatabricksServicePrincipal.typescript.md
+++ b/docs/dataDatabricksServicePrincipal.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -128,6 +129,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksServicePrincipals.csharp.md b/docs/dataDatabricksServicePrincipals.csharp.md
index cb46081c0..57dc27dcc 100644
--- a/docs/dataDatabricksServicePrincipals.csharp.md
+++ b/docs/dataDatabricksServicePrincipals.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksServicePrincipals.go.md b/docs/dataDatabricksServicePrincipals.go.md
index e1b25c9a7..439005fe8 100644
--- a/docs/dataDatabricksServicePrincipals.go.md
+++ b/docs/dataDatabricksServicePrincipals.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
datadatabricksserviceprincipals.NewDataDatabricksServicePrincipals(scope Construct, id *string, config DataDatabricksServicePrincipalsConfig) DataDatabricksServicePrincipals
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -288,7 +297,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
datadatabricksserviceprincipals.DataDatabricksServicePrincipals_IsConstruct(x interface{}) *bool
```
@@ -320,7 +329,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
datadatabricksserviceprincipals.DataDatabricksServicePrincipals_IsTerraformElement(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksserviceprincipals.DataDatabricksServicePrincipals_IsTerraformEleme
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
datadatabricksserviceprincipals.DataDatabricksServicePrincipals_IsTerraformDataSource(x interface{}) *bool
```
@@ -348,7 +357,7 @@ datadatabricksserviceprincipals.DataDatabricksServicePrincipals_IsTerraformDataS
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
datadatabricksserviceprincipals.DataDatabricksServicePrincipals_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -621,7 +630,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksserviceprincipals"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksserviceprincipals"
&datadatabricksserviceprincipals.DataDatabricksServicePrincipalsConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksServicePrincipals.java.md b/docs/dataDatabricksServicePrincipals.java.md
index ea3ecb361..36f5881f8 100644
--- a/docs/dataDatabricksServicePrincipals.java.md
+++ b/docs/dataDatabricksServicePrincipals.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -209,6 +210,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksServicePrincipals.python.md b/docs/dataDatabricksServicePrincipals.python.md
index 54fdeb44a..2edd894a5 100644
--- a/docs/dataDatabricksServicePrincipals.python.md
+++ b/docs/dataDatabricksServicePrincipals.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -212,6 +213,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksServicePrincipals.typescript.md b/docs/dataDatabricksServicePrincipals.typescript.md
index 5a681204e..c9c9080ba 100644
--- a/docs/dataDatabricksServicePrincipals.typescript.md
+++ b/docs/dataDatabricksServicePrincipals.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksShare.csharp.md b/docs/dataDatabricksShare.csharp.md
index 459cb24ed..a09376d1c 100644
--- a/docs/dataDatabricksShare.csharp.md
+++ b/docs/dataDatabricksShare.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -1194,6 +1203,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1201,6 +1211,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -1922,6 +1948,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1929,6 +1956,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -2367,6 +2410,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2374,6 +2418,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksShare.go.md b/docs/dataDatabricksShare.go.md
index 8fd218c4d..f013bf040 100644
--- a/docs/dataDatabricksShare.go.md
+++ b/docs/dataDatabricksShare.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShare(scope Construct, id *string, config DataDatabricksShareConfig) DataDatabricksShare
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -315,7 +324,7 @@ func ResetObject()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.DataDatabricksShare_IsConstruct(x interface{}) *bool
```
@@ -347,7 +356,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.DataDatabricksShare_IsTerraformElement(x interface{}) *bool
```
@@ -361,7 +370,7 @@ datadatabricksshare.DataDatabricksShare_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.DataDatabricksShare_IsTerraformDataSource(x interface{}) *bool
```
@@ -375,7 +384,7 @@ datadatabricksshare.DataDatabricksShare_IsTerraformDataSource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.DataDatabricksShare_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -692,7 +701,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
&datadatabricksshare.DataDatabricksShareConfig {
Connection: interface{},
@@ -869,7 +878,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
&datadatabricksshare.DataDatabricksShareObject {
DataObjectType: *string,
@@ -1043,7 +1052,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
&datadatabricksshare.DataDatabricksShareObjectPartition {
Value: interface{},
@@ -1077,7 +1086,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
&datadatabricksshare.DataDatabricksShareObjectPartitionValue {
Name: *string,
@@ -1153,7 +1162,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksShareObjectList
```
@@ -1194,6 +1203,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1201,6 +1211,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1296,7 +1322,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksShareObjectOutputReference
```
@@ -1881,7 +1907,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectPartitionList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksShareObjectPartitionList
```
@@ -1922,6 +1948,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1929,6 +1956,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2024,7 +2067,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectPartitionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksShareObjectPartitionOutputReference
```
@@ -2326,7 +2369,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectPartitionValueList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksShareObjectPartitionValueList
```
@@ -2367,6 +2410,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2374,6 +2418,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2469,7 +2529,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshare"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshare"
datadatabricksshare.NewDataDatabricksShareObjectPartitionValueOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksShareObjectPartitionValueOutputReference
```
diff --git a/docs/dataDatabricksShare.java.md b/docs/dataDatabricksShare.java.md
index 653cc6a79..a1745f6d4 100644
--- a/docs/dataDatabricksShare.java.md
+++ b/docs/dataDatabricksShare.java.md
@@ -164,6 +164,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -235,6 +236,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -1312,6 +1321,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1319,6 +1329,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2040,6 +2066,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2047,6 +2074,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2485,6 +2528,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2492,6 +2536,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksShare.python.md b/docs/dataDatabricksShare.python.md
index 5e42c7dee..af83c6bcc 100644
--- a/docs/dataDatabricksShare.python.md
+++ b/docs/dataDatabricksShare.python.md
@@ -161,6 +161,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -237,6 +238,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -1343,6 +1352,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1350,6 +1360,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2108,6 +2136,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2115,6 +2144,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2590,6 +2637,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2597,6 +2645,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksShare.typescript.md b/docs/dataDatabricksShare.typescript.md
index 77db0bc34..ae55a7f92 100644
--- a/docs/dataDatabricksShare.typescript.md
+++ b/docs/dataDatabricksShare.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -125,6 +126,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -1162,6 +1171,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1169,6 +1179,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -1890,6 +1916,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1897,6 +1924,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -2335,6 +2378,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2342,6 +2386,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksShares.csharp.md b/docs/dataDatabricksShares.csharp.md
index e73a68a43..bcb3ea469 100644
--- a/docs/dataDatabricksShares.csharp.md
+++ b/docs/dataDatabricksShares.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksShares.go.md b/docs/dataDatabricksShares.go.md
index 393490c58..de0104d9d 100644
--- a/docs/dataDatabricksShares.go.md
+++ b/docs/dataDatabricksShares.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
datadatabricksshares.NewDataDatabricksShares(scope Construct, id *string, config DataDatabricksSharesConfig) DataDatabricksShares
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetShares()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
datadatabricksshares.DataDatabricksShares_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
datadatabricksshares.DataDatabricksShares_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksshares.DataDatabricksShares_IsTerraformElement(x interface{}) *boo
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
datadatabricksshares.DataDatabricksShares_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksshares.DataDatabricksShares_IsTerraformDataSource(x interface{}) *
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
datadatabricksshares.DataDatabricksShares_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -592,7 +601,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksshares"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksshares"
&datadatabricksshares.DataDatabricksSharesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksShares.java.md b/docs/dataDatabricksShares.java.md
index 05d41964f..a84ae8307 100644
--- a/docs/dataDatabricksShares.java.md
+++ b/docs/dataDatabricksShares.java.md
@@ -131,6 +131,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -198,6 +199,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksShares.python.md b/docs/dataDatabricksShares.python.md
index d2d4ff336..6c0bdce17 100644
--- a/docs/dataDatabricksShares.python.md
+++ b/docs/dataDatabricksShares.python.md
@@ -129,6 +129,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -201,6 +202,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksShares.typescript.md b/docs/dataDatabricksShares.typescript.md
index 9bd222747..950803e2a 100644
--- a/docs/dataDatabricksShares.typescript.md
+++ b/docs/dataDatabricksShares.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksSparkVersion.csharp.md b/docs/dataDatabricksSparkVersion.csharp.md
index 9cec356e9..6ddbf3870 100644
--- a/docs/dataDatabricksSparkVersion.csharp.md
+++ b/docs/dataDatabricksSparkVersion.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -130,6 +131,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksSparkVersion.go.md b/docs/dataDatabricksSparkVersion.go.md
index fc74a1f50..3f1cbcf2d 100644
--- a/docs/dataDatabricksSparkVersion.go.md
+++ b/docs/dataDatabricksSparkVersion.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
datadatabrickssparkversion.NewDataDatabricksSparkVersion(scope Construct, id *string, config DataDatabricksSparkVersionConfig) DataDatabricksSparkVersion
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -130,6 +131,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -344,7 +353,7 @@ func ResetSparkVersion()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
datadatabrickssparkversion.DataDatabricksSparkVersion_IsConstruct(x interface{}) *bool
```
@@ -376,7 +385,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
datadatabrickssparkversion.DataDatabricksSparkVersion_IsTerraformElement(x interface{}) *bool
```
@@ -390,7 +399,7 @@ datadatabrickssparkversion.DataDatabricksSparkVersion_IsTerraformElement(x inter
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
datadatabrickssparkversion.DataDatabricksSparkVersion_IsTerraformDataSource(x interface{}) *bool
```
@@ -404,7 +413,7 @@ datadatabrickssparkversion.DataDatabricksSparkVersion_IsTerraformDataSource(x in
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
datadatabrickssparkversion.DataDatabricksSparkVersion_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -853,7 +862,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssparkversion"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssparkversion"
&datadatabrickssparkversion.DataDatabricksSparkVersionConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksSparkVersion.java.md b/docs/dataDatabricksSparkVersion.java.md
index a6e37429b..88cdb3cd5 100644
--- a/docs/dataDatabricksSparkVersion.java.md
+++ b/docs/dataDatabricksSparkVersion.java.md
@@ -229,6 +229,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -305,6 +306,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksSparkVersion.python.md b/docs/dataDatabricksSparkVersion.python.md
index 001a58944..360043866 100644
--- a/docs/dataDatabricksSparkVersion.python.md
+++ b/docs/dataDatabricksSparkVersion.python.md
@@ -219,6 +219,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -300,6 +301,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksSparkVersion.typescript.md b/docs/dataDatabricksSparkVersion.typescript.md
index fe218ac79..7fef2acd2 100644
--- a/docs/dataDatabricksSparkVersion.typescript.md
+++ b/docs/dataDatabricksSparkVersion.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -130,6 +131,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksSqlWarehouse.csharp.md b/docs/dataDatabricksSqlWarehouse.csharp.md
index d67ace74e..7988e881b 100644
--- a/docs/dataDatabricksSqlWarehouse.csharp.md
+++ b/docs/dataDatabricksSqlWarehouse.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -139,6 +140,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
@@ -2309,6 +2318,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2316,6 +2326,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/dataDatabricksSqlWarehouse.go.md b/docs/dataDatabricksSqlWarehouse.go.md
index 3703b9378..50b449edf 100644
--- a/docs/dataDatabricksSqlWarehouse.go.md
+++ b/docs/dataDatabricksSqlWarehouse.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouse(scope Construct, id *string, config DataDatabricksSqlWarehouseConfig) DataDatabricksSqlWarehouse
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -139,6 +140,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -425,7 +434,7 @@ func ResetTags()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_IsConstruct(x interface{}) *bool
```
@@ -457,7 +466,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_IsTerraformElement(x interface{}) *bool
```
@@ -471,7 +480,7 @@ datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_IsTerraformElement(x inter
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_IsTerraformDataSource(x interface{}) *bool
```
@@ -485,7 +494,7 @@ datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_IsTerraformDataSource(x in
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.DataDatabricksSqlWarehouse_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1066,7 +1075,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
&datadatabrickssqlwarehouse.DataDatabricksSqlWarehouseChannel {
Name: *string,
@@ -1098,7 +1107,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
&datadatabrickssqlwarehouse.DataDatabricksSqlWarehouseConfig {
Connection: interface{},
@@ -1109,7 +1118,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
AutoStopMins: *f64,
- Channel: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseChannel,
+ Channel: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseChannel,
ClusterSize: *string,
DataSourceId: *string,
EnablePhoton: interface{},
@@ -1121,10 +1130,10 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabri
MinNumClusters: *f64,
Name: *string,
NumClusters: *f64,
- OdbcParams: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseOdbcParams,
+ OdbcParams: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseOdbcParams,
SpotInstancePolicy: *string,
State: *string,
- Tags: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseTags,
+ Tags: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.dataDatabricksSqlWarehouse.DataDatabricksSqlWarehouseTags,
}
```
@@ -1447,7 +1456,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
&datadatabrickssqlwarehouse.DataDatabricksSqlWarehouseOdbcParams {
Path: *string,
@@ -1535,7 +1544,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
&datadatabrickssqlwarehouse.DataDatabricksSqlWarehouseTags {
CustomTags: interface{},
@@ -1569,7 +1578,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
&datadatabrickssqlwarehouse.DataDatabricksSqlWarehouseTagsCustomTags {
Key: *string,
@@ -1617,7 +1626,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouseChannelOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksSqlWarehouseChannelOutputReference
```
@@ -1895,7 +1904,7 @@ func InternalValue() DataDatabricksSqlWarehouseChannel
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouseOdbcParamsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksSqlWarehouseOdbcParamsOutputReference
```
@@ -2268,7 +2277,7 @@ func InternalValue() DataDatabricksSqlWarehouseOdbcParams
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouseTagsCustomTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) DataDatabricksSqlWarehouseTagsCustomTagsList
```
@@ -2309,6 +2318,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2316,6 +2326,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2411,7 +2437,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouseTagsCustomTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) DataDatabricksSqlWarehouseTagsCustomTagsOutputReference
```
@@ -2722,7 +2748,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouse"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouse"
datadatabrickssqlwarehouse.NewDataDatabricksSqlWarehouseTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DataDatabricksSqlWarehouseTagsOutputReference
```
diff --git a/docs/dataDatabricksSqlWarehouse.java.md b/docs/dataDatabricksSqlWarehouse.java.md
index c32545f55..25ea50ce3 100644
--- a/docs/dataDatabricksSqlWarehouse.java.md
+++ b/docs/dataDatabricksSqlWarehouse.java.md
@@ -289,6 +289,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -374,6 +375,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
@@ -2551,6 +2560,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2558,6 +2568,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/dataDatabricksSqlWarehouse.python.md b/docs/dataDatabricksSqlWarehouse.python.md
index c9bc92049..d7b46409a 100644
--- a/docs/dataDatabricksSqlWarehouse.python.md
+++ b/docs/dataDatabricksSqlWarehouse.python.md
@@ -285,6 +285,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -375,6 +376,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
@@ -2680,6 +2689,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2687,6 +2697,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/dataDatabricksSqlWarehouse.typescript.md b/docs/dataDatabricksSqlWarehouse.typescript.md
index 8fd658702..0127483a9 100644
--- a/docs/dataDatabricksSqlWarehouse.typescript.md
+++ b/docs/dataDatabricksSqlWarehouse.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -139,6 +140,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
@@ -2271,6 +2280,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2278,6 +2288,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/dataDatabricksSqlWarehouses.csharp.md b/docs/dataDatabricksSqlWarehouses.csharp.md
index 962f7cf7c..fa0587366 100644
--- a/docs/dataDatabricksSqlWarehouses.csharp.md
+++ b/docs/dataDatabricksSqlWarehouses.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksSqlWarehouses.go.md b/docs/dataDatabricksSqlWarehouses.go.md
index 374e4f082..5793178d6 100644
--- a/docs/dataDatabricksSqlWarehouses.go.md
+++ b/docs/dataDatabricksSqlWarehouses.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
datadatabrickssqlwarehouses.NewDataDatabricksSqlWarehouses(scope Construct, id *string, config DataDatabricksSqlWarehousesConfig) DataDatabricksSqlWarehouses
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -288,7 +297,7 @@ func ResetWarehouseNameContains()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_IsConstruct(x interface{}) *bool
```
@@ -320,7 +329,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_IsTerraformElement(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_IsTerraformElement(x int
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_IsTerraformDataSource(x interface{}) *bool
```
@@ -348,7 +357,7 @@ datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_IsTerraformDataSource(x
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
datadatabrickssqlwarehouses.DataDatabricksSqlWarehouses_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -621,7 +630,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickssqlwarehouses"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickssqlwarehouses"
&datadatabrickssqlwarehouses.DataDatabricksSqlWarehousesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksSqlWarehouses.java.md b/docs/dataDatabricksSqlWarehouses.java.md
index c65e7dc7a..4708e1033 100644
--- a/docs/dataDatabricksSqlWarehouses.java.md
+++ b/docs/dataDatabricksSqlWarehouses.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -209,6 +210,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksSqlWarehouses.python.md b/docs/dataDatabricksSqlWarehouses.python.md
index bb08eba98..5d97f4fcd 100644
--- a/docs/dataDatabricksSqlWarehouses.python.md
+++ b/docs/dataDatabricksSqlWarehouses.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -212,6 +213,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksSqlWarehouses.typescript.md b/docs/dataDatabricksSqlWarehouses.typescript.md
index 109a08084..b1ba4c346 100644
--- a/docs/dataDatabricksSqlWarehouses.typescript.md
+++ b/docs/dataDatabricksSqlWarehouses.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksTables.csharp.md b/docs/dataDatabricksTables.csharp.md
index fef09f27d..7c6439712 100644
--- a/docs/dataDatabricksTables.csharp.md
+++ b/docs/dataDatabricksTables.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksTables.go.md b/docs/dataDatabricksTables.go.md
index 7fde75832..36239782e 100644
--- a/docs/dataDatabricksTables.go.md
+++ b/docs/dataDatabricksTables.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
datadatabrickstables.NewDataDatabricksTables(scope Construct, id *string, config DataDatabricksTablesConfig) DataDatabricksTables
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
datadatabrickstables.DataDatabricksTables_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
datadatabrickstables.DataDatabricksTables_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabrickstables.DataDatabricksTables_IsTerraformElement(x interface{}) *boo
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
datadatabrickstables.DataDatabricksTables_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabrickstables.DataDatabricksTables_IsTerraformDataSource(x interface{}) *
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
datadatabrickstables.DataDatabricksTables_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -636,7 +645,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickstables"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickstables"
&datadatabrickstables.DataDatabricksTablesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksTables.java.md b/docs/dataDatabricksTables.java.md
index 1072eddbb..a7dbf2038 100644
--- a/docs/dataDatabricksTables.java.md
+++ b/docs/dataDatabricksTables.java.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -218,6 +219,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksTables.python.md b/docs/dataDatabricksTables.python.md
index 9468065a0..997f40b22 100644
--- a/docs/dataDatabricksTables.python.md
+++ b/docs/dataDatabricksTables.python.md
@@ -149,6 +149,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -221,6 +222,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksTables.typescript.md b/docs/dataDatabricksTables.typescript.md
index 5fe094d9d..ade06628e 100644
--- a/docs/dataDatabricksTables.typescript.md
+++ b/docs/dataDatabricksTables.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksUser.csharp.md b/docs/dataDatabricksUser.csharp.md
index 31182266f..184f63953 100644
--- a/docs/dataDatabricksUser.csharp.md
+++ b/docs/dataDatabricksUser.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksUser.go.md b/docs/dataDatabricksUser.go.md
index 59c8e4b44..89918c205 100644
--- a/docs/dataDatabricksUser.go.md
+++ b/docs/dataDatabricksUser.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
datadatabricksuser.NewDataDatabricksUser(scope Construct, id *string, config DataDatabricksUserConfig) DataDatabricksUser
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -288,7 +297,7 @@ func ResetUserName()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
datadatabricksuser.DataDatabricksUser_IsConstruct(x interface{}) *bool
```
@@ -320,7 +329,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
datadatabricksuser.DataDatabricksUser_IsTerraformElement(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabricksuser.DataDatabricksUser_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
datadatabricksuser.DataDatabricksUser_IsTerraformDataSource(x interface{}) *bool
```
@@ -348,7 +357,7 @@ datadatabricksuser.DataDatabricksUser_IsTerraformDataSource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
datadatabricksuser.DataDatabricksUser_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -698,7 +707,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksuser"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksuser"
&datadatabricksuser.DataDatabricksUserConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksUser.java.md b/docs/dataDatabricksUser.java.md
index a7664bb06..3dcbc5285 100644
--- a/docs/dataDatabricksUser.java.md
+++ b/docs/dataDatabricksUser.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -209,6 +210,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksUser.python.md b/docs/dataDatabricksUser.python.md
index dacc50c9a..68f5ab482 100644
--- a/docs/dataDatabricksUser.python.md
+++ b/docs/dataDatabricksUser.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -212,6 +213,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksUser.typescript.md b/docs/dataDatabricksUser.typescript.md
index 8fcd4ba37..03e391678 100644
--- a/docs/dataDatabricksUser.typescript.md
+++ b/docs/dataDatabricksUser.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -122,6 +123,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksViews.csharp.md b/docs/dataDatabricksViews.csharp.md
index 6d1ed1d00..b4b4c838f 100644
--- a/docs/dataDatabricksViews.csharp.md
+++ b/docs/dataDatabricksViews.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksViews.go.md b/docs/dataDatabricksViews.go.md
index cfa72cf3c..073245c60 100644
--- a/docs/dataDatabricksViews.go.md
+++ b/docs/dataDatabricksViews.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
datadatabricksviews.NewDataDatabricksViews(scope Construct, id *string, config DataDatabricksViewsConfig) DataDatabricksViews
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -281,7 +290,7 @@ func ResetIds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
datadatabricksviews.DataDatabricksViews_IsConstruct(x interface{}) *bool
```
@@ -313,7 +322,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
datadatabricksviews.DataDatabricksViews_IsTerraformElement(x interface{}) *bool
```
@@ -327,7 +336,7 @@ datadatabricksviews.DataDatabricksViews_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
datadatabricksviews.DataDatabricksViews_IsTerraformDataSource(x interface{}) *bool
```
@@ -341,7 +350,7 @@ datadatabricksviews.DataDatabricksViews_IsTerraformDataSource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
datadatabricksviews.DataDatabricksViews_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -636,7 +645,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabricksviews"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabricksviews"
&datadatabricksviews.DataDatabricksViewsConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksViews.java.md b/docs/dataDatabricksViews.java.md
index 0772935f9..1f0d3701f 100644
--- a/docs/dataDatabricksViews.java.md
+++ b/docs/dataDatabricksViews.java.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -218,6 +219,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksViews.python.md b/docs/dataDatabricksViews.python.md
index 736e7710d..bb15d20b8 100644
--- a/docs/dataDatabricksViews.python.md
+++ b/docs/dataDatabricksViews.python.md
@@ -149,6 +149,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -221,6 +222,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksViews.typescript.md b/docs/dataDatabricksViews.typescript.md
index 1dbc75bab..5741d25b5 100644
--- a/docs/dataDatabricksViews.typescript.md
+++ b/docs/dataDatabricksViews.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -121,6 +122,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dataDatabricksZones.csharp.md b/docs/dataDatabricksZones.csharp.md
index 1ca0d7c74..cf77b8eca 100644
--- a/docs/dataDatabricksZones.csharp.md
+++ b/docs/dataDatabricksZones.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```csharp
diff --git a/docs/dataDatabricksZones.go.md b/docs/dataDatabricksZones.go.md
index e11cceb59..951114532 100644
--- a/docs/dataDatabricksZones.go.md
+++ b/docs/dataDatabricksZones.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
datadatabrickszones.NewDataDatabricksZones(scope Construct, id *string, config DataDatabricksZonesConfig) DataDatabricksZones
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| Adds this resource to the terraform JSON output. |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| GetAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
+Adds this resource to the terraform JSON output.
+
##### `ToMetadata`
```go
@@ -274,7 +283,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
datadatabrickszones.DataDatabricksZones_IsConstruct(x interface{}) *bool
```
@@ -306,7 +315,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
datadatabrickszones.DataDatabricksZones_IsTerraformElement(x interface{}) *bool
```
@@ -320,7 +329,7 @@ datadatabrickszones.DataDatabricksZones_IsTerraformElement(x interface{}) *bool
##### `IsTerraformDataSource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
datadatabrickszones.DataDatabricksZones_IsTerraformDataSource(x interface{}) *bool
```
@@ -334,7 +343,7 @@ datadatabrickszones.DataDatabricksZones_IsTerraformDataSource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
datadatabrickszones.DataDatabricksZones_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -585,7 +594,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/datadatabrickszones"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/datadatabrickszones"
&datadatabrickszones.DataDatabricksZonesConfig {
Connection: interface{},
diff --git a/docs/dataDatabricksZones.java.md b/docs/dataDatabricksZones.java.md
index 86149461b..d28b0988b 100644
--- a/docs/dataDatabricksZones.java.md
+++ b/docs/dataDatabricksZones.java.md
@@ -121,6 +121,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -187,6 +188,14 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```java
diff --git a/docs/dataDatabricksZones.python.md b/docs/dataDatabricksZones.python.md
index 959a8a11b..e33c2411e 100644
--- a/docs/dataDatabricksZones.python.md
+++ b/docs/dataDatabricksZones.python.md
@@ -119,6 +119,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| Adds this resource to the terraform JSON output. |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| get_any_map_attribute
| *No description.* |
@@ -190,6 +191,14 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `to_metadata`
```python
diff --git a/docs/dataDatabricksZones.typescript.md b/docs/dataDatabricksZones.typescript.md
index 9b1f4820d..11d31eafa 100644
--- a/docs/dataDatabricksZones.typescript.md
+++ b/docs/dataDatabricksZones.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| Adds this resource to the terraform JSON output. |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| getAnyMapAttribute
| *No description.* |
@@ -120,6 +121,14 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
+Adds this resource to the terraform JSON output.
+
##### `toMetadata`
```typescript
diff --git a/docs/dbfsFile.csharp.md b/docs/dbfsFile.csharp.md
index 1a45ab94f..73723ec19 100644
--- a/docs/dbfsFile.csharp.md
+++ b/docs/dbfsFile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/dbfsFile.go.md b/docs/dbfsFile.go.md
index c7f3ddea6..7048bef66 100644
--- a/docs/dbfsFile.go.md
+++ b/docs/dbfsFile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
dbfsfile.NewDbfsFile(scope Construct, id *string, config DbfsFileConfig) DbfsFile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetSource()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
dbfsfile.DbfsFile_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
dbfsfile.DbfsFile_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ dbfsfile.DbfsFile_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
dbfsfile.DbfsFile_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ dbfsfile.DbfsFile_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
dbfsfile.DbfsFile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -820,7 +827,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/dbfsfile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/dbfsfile"
&dbfsfile.DbfsFileConfig {
Connection: interface{},
diff --git a/docs/dbfsFile.java.md b/docs/dbfsFile.java.md
index 0631061e2..b34dc32c9 100644
--- a/docs/dbfsFile.java.md
+++ b/docs/dbfsFile.java.md
@@ -161,6 +161,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -236,6 +237,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/dbfsFile.python.md b/docs/dbfsFile.python.md
index 9ca157fbb..48d4c5e8a 100644
--- a/docs/dbfsFile.python.md
+++ b/docs/dbfsFile.python.md
@@ -159,6 +159,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -239,6 +240,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/dbfsFile.typescript.md b/docs/dbfsFile.typescript.md
index 91723c2fe..5a875d2ec 100644
--- a/docs/dbfsFile.typescript.md
+++ b/docs/dbfsFile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/defaultNamespaceSetting.csharp.md b/docs/defaultNamespaceSetting.csharp.md
index d0740a322..501cf0e60 100644
--- a/docs/defaultNamespaceSetting.csharp.md
+++ b/docs/defaultNamespaceSetting.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/defaultNamespaceSetting.go.md b/docs/defaultNamespaceSetting.go.md
index 8b953ce75..e31d547a4 100644
--- a/docs/defaultNamespaceSetting.go.md
+++ b/docs/defaultNamespaceSetting.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.NewDefaultNamespaceSetting(scope Construct, id *string, config DefaultNamespaceSettingConfig) DefaultNamespaceSetting
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -405,7 +412,7 @@ func ResetSettingName()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.DefaultNamespaceSetting_IsConstruct(x interface{}) *bool
```
@@ -437,7 +444,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.DefaultNamespaceSetting_IsTerraformElement(x interface{}) *bool
```
@@ -451,7 +458,7 @@ defaultnamespacesetting.DefaultNamespaceSetting_IsTerraformElement(x interface{}
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.DefaultNamespaceSetting_IsTerraformResource(x interface{}) *bool
```
@@ -465,7 +472,7 @@ defaultnamespacesetting.DefaultNamespaceSetting_IsTerraformResource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.DefaultNamespaceSetting_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -782,7 +789,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
&defaultnamespacesetting.DefaultNamespaceSettingConfig {
Connection: interface{},
@@ -792,7 +799,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultname
Lifecycle: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformResourceLifecycle,
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
- Namespace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.defaultNamespaceSetting.DefaultNamespaceSettingNamespace,
+ Namespace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.defaultNamespaceSetting.DefaultNamespaceSettingNamespace,
Etag: *string,
Id: *string,
SettingName: *string,
@@ -945,7 +952,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
&defaultnamespacesetting.DefaultNamespaceSettingNamespace {
Value: *string,
@@ -979,7 +986,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/defaultnamespacesetting"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/defaultnamespacesetting"
defaultnamespacesetting.NewDefaultNamespaceSettingNamespaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) DefaultNamespaceSettingNamespaceOutputReference
```
diff --git a/docs/defaultNamespaceSetting.java.md b/docs/defaultNamespaceSetting.java.md
index cb5475a9a..a02f20838 100644
--- a/docs/defaultNamespaceSetting.java.md
+++ b/docs/defaultNamespaceSetting.java.md
@@ -153,6 +153,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -228,6 +229,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/defaultNamespaceSetting.python.md b/docs/defaultNamespaceSetting.python.md
index 486e767a6..0dd6e5ea2 100644
--- a/docs/defaultNamespaceSetting.python.md
+++ b/docs/defaultNamespaceSetting.python.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -231,6 +232,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/defaultNamespaceSetting.typescript.md b/docs/defaultNamespaceSetting.typescript.md
index 83ea75273..6b41fd59d 100644
--- a/docs/defaultNamespaceSetting.typescript.md
+++ b/docs/defaultNamespaceSetting.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/directory.csharp.md b/docs/directory.csharp.md
index 13026c8fb..2f2c34571 100644
--- a/docs/directory.csharp.md
+++ b/docs/directory.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/directory.go.md b/docs/directory.go.md
index 3ec5321aa..64b708364 100644
--- a/docs/directory.go.md
+++ b/docs/directory.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
directory.NewDirectory(scope Construct, id *string, config DirectoryConfig) Directory
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetObjectId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
directory.Directory_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
directory.Directory_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ directory.Directory_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
directory.Directory_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ directory.Directory_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
directory.Directory_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -769,7 +776,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/directory"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/directory"
&directory.DirectoryConfig {
Connection: interface{},
diff --git a/docs/directory.java.md b/docs/directory.java.md
index 23ec5f30c..01fb1dc97 100644
--- a/docs/directory.java.md
+++ b/docs/directory.java.md
@@ -152,6 +152,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -226,6 +227,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/directory.python.md b/docs/directory.python.md
index 0dea5c751..582ad1a43 100644
--- a/docs/directory.python.md
+++ b/docs/directory.python.md
@@ -149,6 +149,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -228,6 +229,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/directory.typescript.md b/docs/directory.typescript.md
index bdf051ec3..3a1a6a9f2 100644
--- a/docs/directory.typescript.md
+++ b/docs/directory.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/entitlements.csharp.md b/docs/entitlements.csharp.md
index 425dac026..a8cc862a9 100644
--- a/docs/entitlements.csharp.md
+++ b/docs/entitlements.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/entitlements.go.md b/docs/entitlements.go.md
index ebee199a5..0b545d906 100644
--- a/docs/entitlements.go.md
+++ b/docs/entitlements.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
entitlements.NewEntitlements(scope Construct, id *string, config EntitlementsConfig) Entitlements
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -427,7 +434,7 @@ func ResetWorkspaceAccess()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
entitlements.Entitlements_IsConstruct(x interface{}) *bool
```
@@ -459,7 +466,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
entitlements.Entitlements_IsTerraformElement(x interface{}) *bool
```
@@ -473,7 +480,7 @@ entitlements.Entitlements_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
entitlements.Entitlements_IsTerraformResource(x interface{}) *bool
```
@@ -487,7 +494,7 @@ entitlements.Entitlements_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
entitlements.Entitlements_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -892,7 +899,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/entitlements"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/entitlements"
&entitlements.EntitlementsConfig {
Connection: interface{},
diff --git a/docs/entitlements.java.md b/docs/entitlements.java.md
index c6121727a..3e9d564bf 100644
--- a/docs/entitlements.java.md
+++ b/docs/entitlements.java.md
@@ -195,6 +195,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -274,6 +275,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/entitlements.python.md b/docs/entitlements.python.md
index 1f3bec5f3..65a26d1ed 100644
--- a/docs/entitlements.python.md
+++ b/docs/entitlements.python.md
@@ -189,6 +189,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -273,6 +274,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/entitlements.typescript.md b/docs/entitlements.typescript.md
index c01afa74e..a171ac687 100644
--- a/docs/entitlements.typescript.md
+++ b/docs/entitlements.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/externalLocation.csharp.md b/docs/externalLocation.csharp.md
index 5bf2dddee..9d3fea877 100644
--- a/docs/externalLocation.csharp.md
+++ b/docs/externalLocation.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/externalLocation.go.md b/docs/externalLocation.go.md
index 8dcbb2179..540953a30 100644
--- a/docs/externalLocation.go.md
+++ b/docs/externalLocation.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.NewExternalLocation(scope Construct, id *string, config ExternalLocationConfig) ExternalLocation
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -454,7 +461,7 @@ func ResetSkipValidation()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.ExternalLocation_IsConstruct(x interface{}) *bool
```
@@ -486,7 +493,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.ExternalLocation_IsTerraformElement(x interface{}) *bool
```
@@ -500,7 +507,7 @@ externallocation.ExternalLocation_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.ExternalLocation_IsTerraformResource(x interface{}) *bool
```
@@ -514,7 +521,7 @@ externallocation.ExternalLocation_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.ExternalLocation_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1029,7 +1036,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
&externallocation.ExternalLocationConfig {
Connection: interface{},
@@ -1044,7 +1051,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externalloc
Url: *string,
AccessPoint: *string,
Comment: *string,
- EncryptionDetails: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.externalLocation.ExternalLocationEncryptionDetails,
+ EncryptionDetails: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.externalLocation.ExternalLocationEncryptionDetails,
ForceDestroy: interface{},
ForceUpdate: interface{},
Id: *string,
@@ -1318,10 +1325,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
&externallocation.ExternalLocationEncryptionDetails {
- SseEncryptionDetails: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.externalLocation.ExternalLocationEncryptionDetailsSseEncryptionDetails,
+ SseEncryptionDetails: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.externalLocation.ExternalLocationEncryptionDetailsSseEncryptionDetails,
}
```
@@ -1352,7 +1359,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
&externallocation.ExternalLocationEncryptionDetailsSseEncryptionDetails {
Algorithm: *string,
@@ -1400,7 +1407,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.NewExternalLocationEncryptionDetailsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ExternalLocationEncryptionDetailsOutputReference
```
@@ -1691,7 +1698,7 @@ func InternalValue() ExternalLocationEncryptionDetails
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/externallocation"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/externallocation"
externallocation.NewExternalLocationEncryptionDetailsSseEncryptionDetailsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ExternalLocationEncryptionDetailsSseEncryptionDetailsOutputReference
```
diff --git a/docs/externalLocation.java.md b/docs/externalLocation.java.md
index 5ebe6cca8..148eb5864 100644
--- a/docs/externalLocation.java.md
+++ b/docs/externalLocation.java.md
@@ -247,6 +247,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -329,6 +330,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/externalLocation.python.md b/docs/externalLocation.python.md
index b37445cad..0c998d919 100644
--- a/docs/externalLocation.python.md
+++ b/docs/externalLocation.python.md
@@ -241,6 +241,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -328,6 +329,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/externalLocation.typescript.md b/docs/externalLocation.typescript.md
index 42478d68a..f61f3abdf 100644
--- a/docs/externalLocation.typescript.md
+++ b/docs/externalLocation.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/gitCredential.csharp.md b/docs/gitCredential.csharp.md
index 6557e1401..bbb4a9c70 100644
--- a/docs/gitCredential.csharp.md
+++ b/docs/gitCredential.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/gitCredential.go.md b/docs/gitCredential.go.md
index cb81c4c40..98d3f03d3 100644
--- a/docs/gitCredential.go.md
+++ b/docs/gitCredential.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
gitcredential.NewGitCredential(scope Construct, id *string, config GitCredentialConfig) GitCredential
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetPersonalAccessToken()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
gitcredential.GitCredential_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
gitcredential.GitCredential_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ gitcredential.GitCredential_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
gitcredential.GitCredential_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ gitcredential.GitCredential_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
gitcredential.GitCredential_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -798,7 +805,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/gitcredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/gitcredential"
&gitcredential.GitCredentialConfig {
Connection: interface{},
diff --git a/docs/gitCredential.java.md b/docs/gitCredential.java.md
index fd0dd0ff9..2faefa091 100644
--- a/docs/gitCredential.java.md
+++ b/docs/gitCredential.java.md
@@ -162,6 +162,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -237,6 +238,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/gitCredential.python.md b/docs/gitCredential.python.md
index 491930a00..9af14cde8 100644
--- a/docs/gitCredential.python.md
+++ b/docs/gitCredential.python.md
@@ -159,6 +159,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -239,6 +240,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/gitCredential.typescript.md b/docs/gitCredential.typescript.md
index 3d15f39fd..55ca05e3c 100644
--- a/docs/gitCredential.typescript.md
+++ b/docs/gitCredential.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/globalInitScript.csharp.md b/docs/globalInitScript.csharp.md
index 191e37c12..815529adf 100644
--- a/docs/globalInitScript.csharp.md
+++ b/docs/globalInitScript.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/globalInitScript.go.md b/docs/globalInitScript.go.md
index 1fe7caba1..596ec5723 100644
--- a/docs/globalInitScript.go.md
+++ b/docs/globalInitScript.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.NewGlobalInitScript(scope Construct, id *string, config GlobalInitScriptConfig) GlobalInitScript
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -426,7 +433,7 @@ func ResetSource()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.GlobalInitScript_IsConstruct(x interface{}) *bool
```
@@ -458,7 +465,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.GlobalInitScript_IsTerraformElement(x interface{}) *bool
```
@@ -472,7 +479,7 @@ globalinitscript.GlobalInitScript_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.GlobalInitScript_IsTerraformResource(x interface{}) *bool
```
@@ -486,7 +493,7 @@ globalinitscript.GlobalInitScript_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.GlobalInitScript_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -891,7 +898,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
&globalinitscript.GlobalInitScriptConfig {
Connection: interface{},
@@ -908,7 +915,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinits
Md5: *string,
Position: *f64,
Source: *string,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.globalInitScript.GlobalInitScriptTimeouts,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.globalInitScript.GlobalInitScriptTimeouts,
}
```
@@ -1110,7 +1117,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
&globalinitscript.GlobalInitScriptTimeouts {
@@ -1125,7 +1132,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinits
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/globalinitscript"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/globalinitscript"
globalinitscript.NewGlobalInitScriptTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) GlobalInitScriptTimeoutsOutputReference
```
diff --git a/docs/globalInitScript.java.md b/docs/globalInitScript.java.md
index 46ccd7d3f..2acd1c804 100644
--- a/docs/globalInitScript.java.md
+++ b/docs/globalInitScript.java.md
@@ -194,6 +194,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -272,6 +273,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/globalInitScript.python.md b/docs/globalInitScript.python.md
index c80f6b46a..e3acca3bf 100644
--- a/docs/globalInitScript.python.md
+++ b/docs/globalInitScript.python.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -274,6 +275,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/globalInitScript.typescript.md b/docs/globalInitScript.typescript.md
index e0542fa0b..8ceca68ff 100644
--- a/docs/globalInitScript.typescript.md
+++ b/docs/globalInitScript.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/grants.csharp.md b/docs/grants.csharp.md
index 803aba868..81d7bc3d8 100644
--- a/docs/grants.csharp.md
+++ b/docs/grants.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1507,6 +1514,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1514,6 +1522,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/grants.go.md b/docs/grants.go.md
index ae976a2b5..7a9a28ff6 100644
--- a/docs/grants.go.md
+++ b/docs/grants.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.NewGrants(scope Construct, id *string, config GrantsConfig) Grants
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -482,7 +489,7 @@ func ResetVolume()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.Grants_IsConstruct(x interface{}) *bool
```
@@ -514,7 +521,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.Grants_IsTerraformElement(x interface{}) *bool
```
@@ -528,7 +535,7 @@ grants.Grants_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.Grants_IsTerraformResource(x interface{}) *bool
```
@@ -542,7 +549,7 @@ grants.Grants_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.Grants_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1101,7 +1108,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
&grants.GrantsConfig {
Connection: interface{},
@@ -1418,7 +1425,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
&grants.GrantsGrant {
Principal: *string,
@@ -1466,7 +1473,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.NewGrantsGrantList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) GrantsGrantList
```
@@ -1507,6 +1514,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1514,6 +1522,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1609,7 +1633,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grants"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grants"
grants.NewGrantsGrantOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) GrantsGrantOutputReference
```
diff --git a/docs/grants.java.md b/docs/grants.java.md
index 86630742e..6657ad9ad 100644
--- a/docs/grants.java.md
+++ b/docs/grants.java.md
@@ -264,6 +264,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -350,6 +351,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1724,6 +1731,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1731,6 +1739,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/grants.python.md b/docs/grants.python.md
index 3b58b8176..7cb9c3945 100644
--- a/docs/grants.python.md
+++ b/docs/grants.python.md
@@ -261,6 +261,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -352,6 +353,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1768,6 +1775,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1775,6 +1783,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/grants.typescript.md b/docs/grants.typescript.md
index 2c7798a88..e35f16a9f 100644
--- a/docs/grants.typescript.md
+++ b/docs/grants.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1481,6 +1488,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1488,6 +1496,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/group.csharp.md b/docs/group.csharp.md
index 6ef8fc15d..901e46db3 100644
--- a/docs/group.csharp.md
+++ b/docs/group.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/group.go.md b/docs/group.go.md
index ca66f0147..3f8e4c06d 100644
--- a/docs/group.go.md
+++ b/docs/group.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
group.NewGroup(scope Construct, id *string, config GroupConfig) Group
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -434,7 +441,7 @@ func ResetWorkspaceAccess()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
group.Group_IsConstruct(x interface{}) *bool
```
@@ -466,7 +473,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
group.Group_IsTerraformElement(x interface{}) *bool
```
@@ -480,7 +487,7 @@ group.Group_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
group.Group_IsTerraformResource(x interface{}) *bool
```
@@ -494,7 +501,7 @@ group.Group_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
group.Group_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -943,7 +950,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/group"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/group"
&group.GroupConfig {
Connection: interface{},
diff --git a/docs/group.java.md b/docs/group.java.md
index e4f91a400..493c9b7e6 100644
--- a/docs/group.java.md
+++ b/docs/group.java.md
@@ -216,6 +216,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -296,6 +297,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/group.python.md b/docs/group.python.md
index 4f5146f32..f545ad181 100644
--- a/docs/group.python.md
+++ b/docs/group.python.md
@@ -209,6 +209,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -294,6 +295,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/group.typescript.md b/docs/group.typescript.md
index 485343f5e..671763375 100644
--- a/docs/group.typescript.md
+++ b/docs/group.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/groupInstanceProfile.csharp.md b/docs/groupInstanceProfile.csharp.md
index c6c45b501..ed4e93eb7 100644
--- a/docs/groupInstanceProfile.csharp.md
+++ b/docs/groupInstanceProfile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/groupInstanceProfile.go.md b/docs/groupInstanceProfile.go.md
index f96354d40..397d3ffff 100644
--- a/docs/groupInstanceProfile.go.md
+++ b/docs/groupInstanceProfile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
groupinstanceprofile.NewGroupInstanceProfile(scope Construct, id *string, config GroupInstanceProfileConfig) GroupInstanceProfile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
groupinstanceprofile.GroupInstanceProfile_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
groupinstanceprofile.GroupInstanceProfile_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ groupinstanceprofile.GroupInstanceProfile_IsTerraformElement(x interface{}) *boo
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
groupinstanceprofile.GroupInstanceProfile_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ groupinstanceprofile.GroupInstanceProfile_IsTerraformResource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
groupinstanceprofile.GroupInstanceProfile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupinstanceprofile"
&groupinstanceprofile.GroupInstanceProfileConfig {
Connection: interface{},
diff --git a/docs/groupInstanceProfile.java.md b/docs/groupInstanceProfile.java.md
index e4c2f8d1d..e0fc18d59 100644
--- a/docs/groupInstanceProfile.java.md
+++ b/docs/groupInstanceProfile.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/groupInstanceProfile.python.md b/docs/groupInstanceProfile.python.md
index d94a13530..205404133 100644
--- a/docs/groupInstanceProfile.python.md
+++ b/docs/groupInstanceProfile.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/groupInstanceProfile.typescript.md b/docs/groupInstanceProfile.typescript.md
index 6c40b74cd..3a1b4b360 100644
--- a/docs/groupInstanceProfile.typescript.md
+++ b/docs/groupInstanceProfile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/groupMember.csharp.md b/docs/groupMember.csharp.md
index 19b5bb918..4583076b6 100644
--- a/docs/groupMember.csharp.md
+++ b/docs/groupMember.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/groupMember.go.md b/docs/groupMember.go.md
index e337ea893..b8debfc4c 100644
--- a/docs/groupMember.go.md
+++ b/docs/groupMember.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
groupmember.NewGroupMember(scope Construct, id *string, config GroupMemberConfig) GroupMember
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
groupmember.GroupMember_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
groupmember.GroupMember_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ groupmember.GroupMember_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
groupmember.GroupMember_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ groupmember.GroupMember_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
groupmember.GroupMember_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/groupmember"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/groupmember"
&groupmember.GroupMemberConfig {
Connection: interface{},
diff --git a/docs/groupMember.java.md b/docs/groupMember.java.md
index 11b351e7b..2f0c68f45 100644
--- a/docs/groupMember.java.md
+++ b/docs/groupMember.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/groupMember.python.md b/docs/groupMember.python.md
index 51d5c9492..7461a4dda 100644
--- a/docs/groupMember.python.md
+++ b/docs/groupMember.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/groupMember.typescript.md b/docs/groupMember.typescript.md
index 2f708d36d..e546f0f16 100644
--- a/docs/groupMember.typescript.md
+++ b/docs/groupMember.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/groupRole.csharp.md b/docs/groupRole.csharp.md
index 063d1a410..9eadcd4e0 100644
--- a/docs/groupRole.csharp.md
+++ b/docs/groupRole.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/groupRole.go.md b/docs/groupRole.go.md
index b2cc569ef..e92e897eb 100644
--- a/docs/groupRole.go.md
+++ b/docs/groupRole.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
grouprole.NewGroupRole(scope Construct, id *string, config GroupRoleConfig) GroupRole
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
grouprole.GroupRole_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
grouprole.GroupRole_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ grouprole.GroupRole_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
grouprole.GroupRole_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ grouprole.GroupRole_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
grouprole.GroupRole_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/grouprole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/grouprole"
&grouprole.GroupRoleConfig {
Connection: interface{},
diff --git a/docs/groupRole.java.md b/docs/groupRole.java.md
index e7927629f..558bdda8d 100644
--- a/docs/groupRole.java.md
+++ b/docs/groupRole.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/groupRole.python.md b/docs/groupRole.python.md
index c23649077..90ae3265c 100644
--- a/docs/groupRole.python.md
+++ b/docs/groupRole.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/groupRole.typescript.md b/docs/groupRole.typescript.md
index 0c584ddc9..740f53cf9 100644
--- a/docs/groupRole.typescript.md
+++ b/docs/groupRole.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/instancePool.csharp.md b/docs/instancePool.csharp.md
index 073f7cdc3..a8076c82f 100644
--- a/docs/instancePool.csharp.md
+++ b/docs/instancePool.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -145,6 +146,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -4336,6 +4343,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4343,6 +4351,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -5451,6 +5475,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -5458,6 +5483,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/instancePool.go.md b/docs/instancePool.go.md
index b88295638..636611ae2 100644
--- a/docs/instancePool.go.md
+++ b/docs/instancePool.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePool(scope Construct, id *string, config InstancePoolConfig) InstancePool
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -145,6 +146,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -547,7 +554,7 @@ func ResetPreloadedSparkVersions()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.InstancePool_IsConstruct(x interface{}) *bool
```
@@ -579,7 +586,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.InstancePool_IsTerraformElement(x interface{}) *bool
```
@@ -593,7 +600,7 @@ instancepool.InstancePool_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.InstancePool_IsTerraformResource(x interface{}) *bool
```
@@ -607,7 +614,7 @@ instancepool.InstancePool_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.InstancePool_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1188,7 +1195,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolAwsAttributes {
Availability: *string,
@@ -1248,7 +1255,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolAzureAttributes {
Availability: *string,
@@ -1294,7 +1301,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolConfig {
Connection: interface{},
@@ -1306,14 +1313,14 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepoo
Provisioners: *[]interface{},
IdleInstanceAutoterminationMinutes: *f64,
InstancePoolName: *string,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolAzureAttributes,
CustomTags: *map[string]*string,
- DiskSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolDiskSpec,
+ DiskSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolDiskSpec,
EnableElasticDisk: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolGcpAttributes,
Id: *string,
- InstancePoolFleetAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolInstancePoolFleetAttributes,
+ InstancePoolFleetAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolInstancePoolFleetAttributes,
InstancePoolId: *string,
MaxCapacity: *f64,
MinIdleInstances: *f64,
@@ -1635,12 +1642,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolDiskSpec {
DiskCount: *f64,
DiskSize: *f64,
- DiskType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolDiskSpecDiskType,
+ DiskType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolDiskSpecDiskType,
}
```
@@ -1697,7 +1704,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolDiskSpecDiskType {
AzureDiskVolumeType: *string,
@@ -1743,7 +1750,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolGcpAttributes {
GcpAvailability: *string,
@@ -1789,12 +1796,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolInstancePoolFleetAttributes {
LaunchTemplateOverride: interface{},
- FleetOnDemandOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolInstancePoolFleetAttributesFleetOnDemandOption,
- FleetSpotOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolInstancePoolFleetAttributesFleetSpotOption,
+ FleetOnDemandOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolInstancePoolFleetAttributesFleetOnDemandOption,
+ FleetSpotOption: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolInstancePoolFleetAttributesFleetSpotOption,
}
```
@@ -1855,7 +1862,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolInstancePoolFleetAttributesFleetOnDemandOption {
AllocationStrategy: *string,
@@ -1901,7 +1908,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolInstancePoolFleetAttributesFleetSpotOption {
AllocationStrategy: *string,
@@ -1947,7 +1954,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolInstancePoolFleetAttributesLaunchTemplateOverride {
AvailabilityZone: *string,
@@ -1993,11 +2000,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolPreloadedDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.instancePool.InstancePoolPreloadedDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.instancePool.InstancePoolPreloadedDockerImageBasicAuth,
}
```
@@ -2041,7 +2048,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
&instancepool.InstancePoolPreloadedDockerImageBasicAuth {
Password: *string,
@@ -2089,7 +2096,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolAwsAttributesOutputReference
```
@@ -2425,7 +2432,7 @@ func InternalValue() InstancePoolAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolAzureAttributesOutputReference
```
@@ -2732,7 +2739,7 @@ func InternalValue() InstancePoolAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolDiskSpecDiskTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolDiskSpecDiskTypeOutputReference
```
@@ -3039,7 +3046,7 @@ func InternalValue() InstancePoolDiskSpecDiskType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolDiskSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolDiskSpecOutputReference
```
@@ -3388,7 +3395,7 @@ func InternalValue() InstancePoolDiskSpec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolGcpAttributesOutputReference
```
@@ -3695,7 +3702,7 @@ func InternalValue() InstancePoolGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolInstancePoolFleetAttributesFleetOnDemandOptionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolInstancePoolFleetAttributesFleetOnDemandOptionOutputReference
```
@@ -3995,7 +4002,7 @@ func InternalValue() InstancePoolInstancePoolFleetAttributesFleetOnDemandOption
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolInstancePoolFleetAttributesFleetSpotOptionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolInstancePoolFleetAttributesFleetSpotOptionOutputReference
```
@@ -4295,7 +4302,7 @@ func InternalValue() InstancePoolInstancePoolFleetAttributesFleetSpotOption
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) InstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideList
```
@@ -4336,6 +4343,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4343,6 +4351,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -4438,7 +4462,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) InstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference
```
@@ -4749,7 +4773,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolInstancePoolFleetAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolInstancePoolFleetAttributesOutputReference
```
@@ -5117,7 +5141,7 @@ func InternalValue() InstancePoolInstancePoolFleetAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolPreloadedDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) InstancePoolPreloadedDockerImageBasicAuthOutputReference
```
@@ -5410,7 +5434,7 @@ func InternalValue() InstancePoolPreloadedDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolPreloadedDockerImageList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) InstancePoolPreloadedDockerImageList
```
@@ -5451,6 +5475,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -5458,6 +5483,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -5553,7 +5594,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instancepool"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instancepool"
instancepool.NewInstancePoolPreloadedDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) InstancePoolPreloadedDockerImageOutputReference
```
diff --git a/docs/instancePool.java.md b/docs/instancePool.java.md
index 8142e4179..78c15f0c3 100644
--- a/docs/instancePool.java.md
+++ b/docs/instancePool.java.md
@@ -285,6 +285,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -376,6 +377,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -4576,6 +4583,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4583,6 +4591,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -5691,6 +5715,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -5698,6 +5723,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/instancePool.python.md b/docs/instancePool.python.md
index b0c617773..745ca55fb 100644
--- a/docs/instancePool.python.md
+++ b/docs/instancePool.python.md
@@ -281,6 +281,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -377,6 +378,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -4905,6 +4912,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -4912,6 +4920,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -6133,6 +6159,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -6140,6 +6167,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/instancePool.typescript.md b/docs/instancePool.typescript.md
index 4d536a9e2..44b19e996 100644
--- a/docs/instancePool.typescript.md
+++ b/docs/instancePool.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -145,6 +146,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -4276,6 +4283,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4283,6 +4291,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -5391,6 +5415,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -5398,6 +5423,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/instanceProfile.csharp.md b/docs/instanceProfile.csharp.md
index a25333a68..ab52c5c29 100644
--- a/docs/instanceProfile.csharp.md
+++ b/docs/instanceProfile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/instanceProfile.go.md b/docs/instanceProfile.go.md
index b1c53e62b..e7c3487a0 100644
--- a/docs/instanceProfile.go.md
+++ b/docs/instanceProfile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
instanceprofile.NewInstanceProfile(scope Construct, id *string, config InstanceProfileConfig) InstanceProfile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetSkipValidation()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
instanceprofile.InstanceProfile_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
instanceprofile.InstanceProfile_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ instanceprofile.InstanceProfile_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
instanceprofile.InstanceProfile_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ instanceprofile.InstanceProfile_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
instanceprofile.InstanceProfile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -798,7 +805,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/instanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/instanceprofile"
&instanceprofile.InstanceProfileConfig {
Connection: interface{},
diff --git a/docs/instanceProfile.java.md b/docs/instanceProfile.java.md
index 2b3546247..dca9598cc 100644
--- a/docs/instanceProfile.java.md
+++ b/docs/instanceProfile.java.md
@@ -163,6 +163,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -238,6 +239,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/instanceProfile.python.md b/docs/instanceProfile.python.md
index 89dd0bced..a3d4b53a0 100644
--- a/docs/instanceProfile.python.md
+++ b/docs/instanceProfile.python.md
@@ -159,6 +159,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -239,6 +240,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/instanceProfile.typescript.md b/docs/instanceProfile.typescript.md
index 63deda425..844236507 100644
--- a/docs/instanceProfile.typescript.md
+++ b/docs/instanceProfile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/ipAccessList.csharp.md b/docs/ipAccessList.csharp.md
index 527d5e818..3adbddf54 100644
--- a/docs/ipAccessList.csharp.md
+++ b/docs/ipAccessList.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/ipAccessList.go.md b/docs/ipAccessList.go.md
index 09d42019c..2a24e938a 100644
--- a/docs/ipAccessList.go.md
+++ b/docs/ipAccessList.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
ipaccesslist.NewIpAccessList(scope Construct, id *string, config IpAccessListConfig) IpAccessList
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -385,7 +392,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
ipaccesslist.IpAccessList_IsConstruct(x interface{}) *bool
```
@@ -417,7 +424,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
ipaccesslist.IpAccessList_IsTerraformElement(x interface{}) *bool
```
@@ -431,7 +438,7 @@ ipaccesslist.IpAccessList_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
ipaccesslist.IpAccessList_IsTerraformResource(x interface{}) *bool
```
@@ -445,7 +452,7 @@ ipaccesslist.IpAccessList_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
ipaccesslist.IpAccessList_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -784,7 +791,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/ipaccesslist"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/ipaccesslist"
&ipaccesslist.IpAccessListConfig {
Connection: interface{},
diff --git a/docs/ipAccessList.java.md b/docs/ipAccessList.java.md
index e06766a87..31f8955b1 100644
--- a/docs/ipAccessList.java.md
+++ b/docs/ipAccessList.java.md
@@ -162,6 +162,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -235,6 +236,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/ipAccessList.python.md b/docs/ipAccessList.python.md
index 7da0e931b..a7c172803 100644
--- a/docs/ipAccessList.python.md
+++ b/docs/ipAccessList.python.md
@@ -159,6 +159,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -237,6 +238,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/ipAccessList.typescript.md b/docs/ipAccessList.typescript.md
index 810a49bc2..eec051865 100644
--- a/docs/ipAccessList.typescript.md
+++ b/docs/ipAccessList.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/job.csharp.md b/docs/job.csharp.md
index 0cbffaa3f..a6169da0d 100644
--- a/docs/job.csharp.md
+++ b/docs/job.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -191,6 +192,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -12093,6 +12100,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -12100,6 +12108,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -15275,6 +15299,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -15282,6 +15307,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -15772,6 +15813,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -15779,6 +15821,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -18088,6 +18146,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -18095,6 +18154,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -21018,6 +21093,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -21025,6 +21101,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -25181,6 +25273,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -25188,6 +25281,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -28606,6 +28715,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -28613,6 +28723,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -31536,6 +31662,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -31543,6 +31670,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -35697,6 +35840,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -35704,6 +35848,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -39725,6 +39885,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -39732,6 +39893,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -40835,6 +41012,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -40842,6 +41020,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -41632,6 +41826,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -41639,6 +41834,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -42884,6 +43095,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -42891,6 +43103,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -45200,6 +45428,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -45207,6 +45436,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -48130,6 +48375,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -48137,6 +48383,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -55834,6 +56096,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -55841,6 +56104,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -56673,6 +56952,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -56680,6 +56960,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -58158,6 +58454,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58165,6 +58462,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -58597,6 +58910,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58604,6 +58918,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -59036,6 +59366,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59043,6 +59374,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -59475,6 +59822,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59482,6 +59830,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -61280,6 +61644,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61287,6 +61652,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -61719,6 +62100,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61726,6 +62108,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -62158,6 +62556,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62165,6 +62564,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -62597,6 +63012,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62604,6 +63020,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/job.go.md b/docs/job.go.md
index 3d390c809..8f63b7396 100644
--- a/docs/job.go.md
+++ b/docs/job.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJob(scope Construct, id *string, config JobConfig) Job
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -191,6 +192,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -989,7 +996,7 @@ func ResetWebhookNotifications()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.Job_IsConstruct(x interface{}) *bool
```
@@ -1021,7 +1028,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.Job_IsTerraformElement(x interface{}) *bool
```
@@ -1035,7 +1042,7 @@ job.Job_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.Job_IsTerraformResource(x interface{}) *bool
```
@@ -1049,7 +1056,7 @@ job.Job_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.Job_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -2169,11 +2176,11 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobCompute {
ComputeKey: *string,
- Spec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobComputeSpec,
+ Spec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobComputeSpec,
}
```
@@ -2217,7 +2224,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobComputeSpec {
Kind: *string,
@@ -2249,7 +2256,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobConfig {
Connection: interface{},
@@ -2261,17 +2268,17 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
Provisioners: *[]interface{},
AlwaysRunning: interface{},
Compute: interface{},
- Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobContinuous,
+ Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobContinuous,
ControlRunState: interface{},
- DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobDbtTask,
- Deployment: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobDeployment,
+ DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobDbtTask,
+ Deployment: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobDeployment,
Description: *string,
EditMode: *string,
- EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobEmailNotifications,
+ EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobEmailNotifications,
ExistingClusterId: *string,
Format: *string,
- GitSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobGitSource,
- Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobHealth,
+ GitSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobGitSource,
+ Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobHealth,
Id: *string,
JobCluster: interface{},
Library: interface{},
@@ -2279,26 +2286,26 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
MaxRetries: *f64,
MinRetryIntervalMillis: *f64,
Name: *string,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewCluster,
- NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNotebookTask,
- NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNotificationSettings,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewCluster,
+ NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNotebookTask,
+ NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNotificationSettings,
Parameter: interface{},
- PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobPipelineTask,
- PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobPythonWheelTask,
- Queue: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobQueue,
+ PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobPipelineTask,
+ PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobPythonWheelTask,
+ Queue: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobQueue,
RetryOnTimeout: interface{},
- RunAs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobRunAs,
- RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobRunJobTask,
- Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobSchedule,
- SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobSparkJarTask,
- SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobSparkPythonTask,
- SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobSparkSubmitTask,
+ RunAs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobRunAs,
+ RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobRunJobTask,
+ Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobSchedule,
+ SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobSparkJarTask,
+ SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobSparkPythonTask,
+ SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobSparkSubmitTask,
Tags: *map[string]*string,
Task: interface{},
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTimeouts,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTimeouts,
TimeoutSeconds: *f64,
- Trigger: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTrigger,
- WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobWebhookNotifications,
+ Trigger: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTrigger,
+ WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobWebhookNotifications,
}
```
@@ -2966,7 +2973,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobContinuous {
PauseStatus: *string,
@@ -2998,7 +3005,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobDbtTask {
Commands: *[]*string,
@@ -3100,7 +3107,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobDeployment {
Kind: *string,
@@ -3146,7 +3153,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobEmailNotifications {
NoAlertForSkippedRuns: interface{},
@@ -3234,13 +3241,13 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobGitSource {
Url: *string,
Branch: *string,
Commit: *string,
- JobSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobGitSourceJobSource,
+ JobSource: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobGitSourceJobSource,
Provider: *string,
Tag: *string,
}
@@ -3338,7 +3345,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobGitSourceJobSource {
ImportFromGitBranch: *string,
@@ -3398,7 +3405,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobHealth {
Rules: interface{},
@@ -3432,7 +3439,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobHealthRules {
Metric: *string,
@@ -3492,11 +3499,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobCluster {
JobClusterKey: *string,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewCluster,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewCluster,
}
```
@@ -3540,27 +3547,27 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewCluster {
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -3572,7 +3579,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterWorkloadType,
}
```
@@ -3996,7 +4003,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterAutoscale {
MaxWorkers: *f64,
@@ -4042,7 +4049,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterAwsAttributes {
Availability: *string,
@@ -4172,7 +4179,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterAzureAttributes {
Availability: *string,
@@ -4232,11 +4239,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterClusterLogConfS3,
}
```
@@ -4282,7 +4289,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -4314,7 +4321,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterClusterLogConfS3 {
Destination: *string,
@@ -4430,11 +4437,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -4492,7 +4499,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -4538,11 +4545,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterDockerImageBasicAuth,
}
```
@@ -4586,7 +4593,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterDockerImageBasicAuth {
Password: *string,
@@ -4632,7 +4639,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterGcpAttributes {
Availability: *string,
@@ -4734,16 +4741,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterInitScriptsWorkspace,
}
```
@@ -4864,7 +4871,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsAbfss {
Destination: *string,
@@ -4896,7 +4903,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsDbfs {
Destination: *string,
@@ -4928,7 +4935,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsFile {
Destination: *string,
@@ -4960,7 +4967,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsGcs {
Destination: *string,
@@ -4992,7 +4999,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsS3 {
Destination: *string,
@@ -5108,7 +5115,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsVolumes {
Destination: *string,
@@ -5140,7 +5147,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -5172,10 +5179,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobJobClusterNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobJobClusterNewClusterWorkloadTypeClients,
}
```
@@ -5206,7 +5213,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobJobClusterNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -5252,14 +5259,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobLibrary {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobLibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobLibraryCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobLibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobLibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobLibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobLibraryPypi,
Whl: *string,
}
```
@@ -5360,7 +5367,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobLibraryCran {
Package: *string,
@@ -5406,7 +5413,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobLibraryMaven {
Coordinates: *string,
@@ -5466,7 +5473,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobLibraryPypi {
Package: *string,
@@ -5512,27 +5519,27 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewCluster {
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -5544,7 +5551,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterWorkloadType,
}
```
@@ -5968,7 +5975,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterAutoscale {
MaxWorkers: *f64,
@@ -6014,7 +6021,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterAwsAttributes {
Availability: *string,
@@ -6144,7 +6151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterAzureAttributes {
Availability: *string,
@@ -6204,11 +6211,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterClusterLogConfS3,
}
```
@@ -6254,7 +6261,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -6286,7 +6293,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterClusterLogConfS3 {
Destination: *string,
@@ -6402,11 +6409,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -6464,7 +6471,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -6510,11 +6517,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterDockerImageBasicAuth,
}
```
@@ -6558,7 +6565,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterDockerImageBasicAuth {
Password: *string,
@@ -6604,7 +6611,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterGcpAttributes {
Availability: *string,
@@ -6706,16 +6713,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterInitScriptsWorkspace,
}
```
@@ -6836,7 +6843,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsAbfss {
Destination: *string,
@@ -6868,7 +6875,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsDbfs {
Destination: *string,
@@ -6900,7 +6907,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsFile {
Destination: *string,
@@ -6932,7 +6939,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsGcs {
Destination: *string,
@@ -6964,7 +6971,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsS3 {
Destination: *string,
@@ -7080,7 +7087,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsVolumes {
Destination: *string,
@@ -7112,7 +7119,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -7144,10 +7151,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobNewClusterWorkloadTypeClients,
}
```
@@ -7178,7 +7185,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -7224,7 +7231,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNotebookTask {
NotebookPath: *string,
@@ -7284,7 +7291,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobNotificationSettings {
NoAlertForCanceledRuns: interface{},
@@ -7330,7 +7337,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobParameter {
Default: *string,
@@ -7376,7 +7383,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobPipelineTask {
PipelineId: *string,
@@ -7422,7 +7429,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobPythonWheelTask {
EntryPoint: *string,
@@ -7496,7 +7503,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobQueue {
Enabled: interface{},
@@ -7528,7 +7535,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobRunAs {
ServicePrincipalName: *string,
@@ -7574,7 +7581,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobRunJobTask {
JobId: *f64,
@@ -7620,7 +7627,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobSchedule {
QuartzCronExpression: *string,
@@ -7680,7 +7687,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobSparkJarTask {
JarUri: *string,
@@ -7740,7 +7747,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobSparkPythonTask {
PythonFile: *string,
@@ -7800,7 +7807,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobSparkSubmitTask {
Parameters: *[]*string,
@@ -7832,36 +7839,36 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTask {
ComputeKey: *string,
- ConditionTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskConditionTask,
- DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskDbtTask,
+ ConditionTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskConditionTask,
+ DbtTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskDbtTask,
DependsOn: interface{},
Description: *string,
- EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskEmailNotifications,
+ EmailNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskEmailNotifications,
ExistingClusterId: *string,
- Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskHealth,
+ Health: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskHealth,
JobClusterKey: *string,
Library: interface{},
MaxRetries: *f64,
MinRetryIntervalMillis: *f64,
- NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewCluster,
- NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNotebookTask,
- NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNotificationSettings,
- PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskPipelineTask,
- PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskPythonWheelTask,
+ NewCluster: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewCluster,
+ NotebookTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNotebookTask,
+ NotificationSettings: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNotificationSettings,
+ PipelineTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskPipelineTask,
+ PythonWheelTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskPythonWheelTask,
RetryOnTimeout: interface{},
RunIf: *string,
- RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskRunJobTask,
- SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSparkJarTask,
- SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSparkPythonTask,
- SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSparkSubmitTask,
- SqlTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSqlTask,
+ RunJobTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskRunJobTask,
+ SparkJarTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSparkJarTask,
+ SparkPythonTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSparkPythonTask,
+ SparkSubmitTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSparkSubmitTask,
+ SqlTask: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSqlTask,
TaskKey: *string,
TimeoutSeconds: *f64,
- WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskWebhookNotifications,
+ WebhookNotifications: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskWebhookNotifications,
}
```
@@ -8262,7 +8269,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskConditionTask {
Left: *string,
@@ -8322,7 +8329,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskDbtTask {
Commands: *[]*string,
@@ -8424,7 +8431,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskDependsOn {
TaskKey: *string,
@@ -8470,7 +8477,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskEmailNotifications {
OnDurationWarningThresholdExceeded: *[]*string,
@@ -8544,7 +8551,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskHealth {
Rules: interface{},
@@ -8578,7 +8585,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskHealthRules {
Metric: *string,
@@ -8638,14 +8645,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskLibrary {
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskLibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskLibraryCran,
Egg: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskLibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskLibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskLibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskLibraryPypi,
Whl: *string,
}
```
@@ -8746,7 +8753,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskLibraryCran {
Package: *string,
@@ -8792,7 +8799,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskLibraryMaven {
Coordinates: *string,
@@ -8852,7 +8859,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskLibraryPypi {
Package: *string,
@@ -8898,27 +8905,27 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewCluster {
SparkVersion: *string,
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterAutoscale,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterAutoscale,
AutoterminationMinutes: *f64,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterAzureAttributes,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterAzureAttributes,
ClusterId: *string,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterClusterLogConf,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterClusterLogConf,
ClusterMountInfo: interface{},
ClusterName: *string,
CustomTags: *map[string]*string,
DataSecurityMode: *string,
- DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterDockerImage,
+ DockerImage: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterDockerImage,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableElasticDisk: interface{},
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterGcpAttributes,
IdempotencyToken: *string,
InitScripts: interface{},
InstancePoolId: *string,
@@ -8930,7 +8937,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
SparkConf: *map[string]*string,
SparkEnvVars: *map[string]*string,
SshPublicKeys: *[]*string,
- WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterWorkloadType,
+ WorkloadType: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterWorkloadType,
}
```
@@ -9354,7 +9361,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterAutoscale {
MaxWorkers: *f64,
@@ -9400,7 +9407,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterAwsAttributes {
Availability: *string,
@@ -9530,7 +9537,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterAzureAttributes {
Availability: *string,
@@ -9590,11 +9597,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterClusterLogConfS3,
}
```
@@ -9640,7 +9647,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterClusterLogConfDbfs {
Destination: *string,
@@ -9672,7 +9679,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterClusterLogConfS3 {
Destination: *string,
@@ -9788,11 +9795,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterClusterMountInfo {
LocalMountDirPath: *string,
- NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfo,
+ NetworkFilesystemInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfo,
RemoteMountDirPath: *string,
}
```
@@ -9850,7 +9857,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterClusterMountInfoNetworkFilesystemInfo {
ServerAddress: *string,
@@ -9896,11 +9903,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterDockerImage {
Url: *string,
- BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterDockerImageBasicAuth,
+ BasicAuth: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterDockerImageBasicAuth,
}
```
@@ -9944,7 +9951,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterDockerImageBasicAuth {
Password: *string,
@@ -9990,7 +9997,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterGcpAttributes {
Availability: *string,
@@ -10092,16 +10099,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterInitScriptsWorkspace,
}
```
@@ -10222,7 +10229,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsAbfss {
Destination: *string,
@@ -10254,7 +10261,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsDbfs {
Destination: *string,
@@ -10286,7 +10293,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsFile {
Destination: *string,
@@ -10318,7 +10325,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsGcs {
Destination: *string,
@@ -10350,7 +10357,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsS3 {
Destination: *string,
@@ -10466,7 +10473,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsVolumes {
Destination: *string,
@@ -10498,7 +10505,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterInitScriptsWorkspace {
Destination: *string,
@@ -10530,10 +10537,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterWorkloadType {
- Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskNewClusterWorkloadTypeClients,
+ Clients: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskNewClusterWorkloadTypeClients,
}
```
@@ -10564,7 +10571,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNewClusterWorkloadTypeClients {
Jobs: interface{},
@@ -10610,7 +10617,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNotebookTask {
NotebookPath: *string,
@@ -10670,7 +10677,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskNotificationSettings {
AlertOnLastAttempt: interface{},
@@ -10730,7 +10737,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskPipelineTask {
PipelineId: *string,
@@ -10776,7 +10783,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskPythonWheelTask {
EntryPoint: *string,
@@ -10850,7 +10857,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskRunJobTask {
JobId: *f64,
@@ -10896,7 +10903,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSparkJarTask {
JarUri: *string,
@@ -10956,7 +10963,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSparkPythonTask {
PythonFile: *string,
@@ -11016,7 +11023,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSparkSubmitTask {
Parameters: *[]*string,
@@ -11048,14 +11055,14 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTask {
- Alert: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSqlTaskAlert,
- Dashboard: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSqlTaskDashboard,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSqlTaskFile,
+ Alert: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSqlTaskAlert,
+ Dashboard: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSqlTaskDashboard,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSqlTaskFile,
Parameters: *map[string]*string,
- Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTaskSqlTaskQuery,
+ Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTaskSqlTaskQuery,
WarehouseId: *string,
}
```
@@ -11158,7 +11165,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskAlert {
AlertId: *string,
@@ -11220,7 +11227,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskAlertSubscriptions {
DestinationId: *string,
@@ -11266,7 +11273,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskDashboard {
DashboardId: *string,
@@ -11342,7 +11349,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskDashboardSubscriptions {
DestinationId: *string,
@@ -11388,7 +11395,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskFile {
Path: *string,
@@ -11420,7 +11427,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskSqlTaskQuery {
QueryId: *string,
@@ -11452,7 +11459,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskWebhookNotifications {
OnDurationWarningThresholdExceeded: interface{},
@@ -11534,7 +11541,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskWebhookNotificationsOnDurationWarningThresholdExceeded {
Id: *string,
@@ -11569,7 +11576,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskWebhookNotificationsOnFailure {
Id: *string,
@@ -11604,7 +11611,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskWebhookNotificationsOnStart {
Id: *string,
@@ -11639,7 +11646,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTaskWebhookNotificationsOnSuccess {
Id: *string,
@@ -11674,7 +11681,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTimeouts {
Create: *string,
@@ -11720,10 +11727,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTrigger {
- FileArrival: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.job.JobTriggerFileArrival,
+ FileArrival: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.job.JobTriggerFileArrival,
PauseStatus: *string,
}
```
@@ -11768,7 +11775,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobTriggerFileArrival {
Url: *string,
@@ -11828,7 +11835,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobWebhookNotifications {
OnDurationWarningThresholdExceeded: interface{},
@@ -11910,7 +11917,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobWebhookNotificationsOnDurationWarningThresholdExceeded {
Id: *string,
@@ -11945,7 +11952,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobWebhookNotificationsOnFailure {
Id: *string,
@@ -11980,7 +11987,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobWebhookNotificationsOnStart {
Id: *string,
@@ -12015,7 +12022,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
&job.JobWebhookNotificationsOnSuccess {
Id: *string,
@@ -12052,7 +12059,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobComputeList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobComputeList
```
@@ -12093,6 +12100,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -12100,6 +12108,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -12195,7 +12219,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobComputeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobComputeOutputReference
```
@@ -12533,7 +12557,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobComputeSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobComputeSpecOutputReference
```
@@ -12811,7 +12835,7 @@ func InternalValue() JobComputeSpec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobContinuousOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobContinuousOutputReference
```
@@ -13089,7 +13113,7 @@ func InternalValue() JobContinuous
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobDbtTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobDbtTaskOutputReference
```
@@ -13505,7 +13529,7 @@ func InternalValue() JobDbtTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobDeploymentOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobDeploymentOutputReference
```
@@ -13805,7 +13829,7 @@ func InternalValue() JobDeployment
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobEmailNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobEmailNotificationsOutputReference
```
@@ -14199,7 +14223,7 @@ func InternalValue() JobEmailNotifications
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobGitSourceJobSourceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobGitSourceJobSourceOutputReference
```
@@ -14521,7 +14545,7 @@ func InternalValue() JobGitSourceJobSource
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobGitSourceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobGitSourceOutputReference
```
@@ -14950,7 +14974,7 @@ func InternalValue() JobGitSource
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobHealthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobHealthOutputReference
```
@@ -15234,7 +15258,7 @@ func InternalValue() JobHealth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobHealthRulesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobHealthRulesList
```
@@ -15275,6 +15299,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -15282,6 +15307,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -15377,7 +15418,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobHealthRulesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobHealthRulesOutputReference
```
@@ -15731,7 +15772,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobJobClusterList
```
@@ -15772,6 +15813,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -15779,6 +15821,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -15874,7 +15932,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterAutoscaleOutputReference
```
@@ -16181,7 +16239,7 @@ func InternalValue() JobJobClusterNewClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterAwsAttributesOutputReference
```
@@ -16662,7 +16720,7 @@ func InternalValue() JobJobClusterNewClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterAzureAttributesOutputReference
```
@@ -16998,7 +17056,7 @@ func InternalValue() JobJobClusterNewClusterAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterClusterLogConfDbfsOutputReference
```
@@ -17269,7 +17327,7 @@ func InternalValue() JobJobClusterNewClusterClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterClusterLogConfOutputReference
```
@@ -17602,7 +17660,7 @@ func InternalValue() JobJobClusterNewClusterClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterClusterLogConfS3OutputReference
```
@@ -18047,7 +18105,7 @@ func InternalValue() JobJobClusterNewClusterClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobJobClusterNewClusterClusterMountInfoList
```
@@ -18088,6 +18146,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -18095,6 +18154,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -18190,7 +18265,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -18490,7 +18565,7 @@ func InternalValue() JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobJobClusterNewClusterClusterMountInfoOutputReference
```
@@ -18843,7 +18918,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterDockerImageBasicAuthOutputReference
```
@@ -19136,7 +19211,7 @@ func InternalValue() JobJobClusterNewClusterDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterDockerImageOutputReference
```
@@ -19449,7 +19524,7 @@ func InternalValue() JobJobClusterNewClusterDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterGcpAttributesOutputReference
```
@@ -19872,7 +19947,7 @@ func InternalValue() JobJobClusterNewClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsAbfssOutputReference
```
@@ -20150,7 +20225,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsDbfsOutputReference
```
@@ -20421,7 +20496,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsFileOutputReference
```
@@ -20699,7 +20774,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsGcsOutputReference
```
@@ -20977,7 +21052,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobJobClusterNewClusterInitScriptsList
```
@@ -21018,6 +21093,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -21025,6 +21101,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -21120,7 +21212,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobJobClusterNewClusterInitScriptsOutputReference
```
@@ -21681,7 +21773,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsS3OutputReference
```
@@ -22126,7 +22218,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsVolumesOutputReference
```
@@ -22404,7 +22496,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterInitScriptsWorkspaceOutputReference
```
@@ -22682,7 +22774,7 @@ func InternalValue() JobJobClusterNewClusterInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterOutputReference
```
@@ -23911,7 +24003,7 @@ func InternalValue() JobJobClusterNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterWorkloadTypeClientsOutputReference
```
@@ -24218,7 +24310,7 @@ func InternalValue() JobJobClusterNewClusterWorkloadTypeClients
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobJobClusterNewClusterWorkloadTypeOutputReference
```
@@ -24502,7 +24594,7 @@ func InternalValue() JobJobClusterNewClusterWorkloadType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobJobClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobJobClusterOutputReference
```
@@ -24840,7 +24932,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobLibraryCranOutputReference
```
@@ -25140,7 +25232,7 @@ func InternalValue() JobLibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobLibraryList
```
@@ -25181,6 +25273,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -25188,6 +25281,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -25283,7 +25392,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobLibraryMavenOutputReference
```
@@ -25612,7 +25721,7 @@ func InternalValue() JobLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobLibraryOutputReference
```
@@ -26092,7 +26201,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobLibraryPypiOutputReference
```
@@ -26392,7 +26501,7 @@ func InternalValue() JobLibraryPypi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterAutoscaleOutputReference
```
@@ -26699,7 +26808,7 @@ func InternalValue() JobNewClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterAwsAttributesOutputReference
```
@@ -27180,7 +27289,7 @@ func InternalValue() JobNewClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterAzureAttributesOutputReference
```
@@ -27516,7 +27625,7 @@ func InternalValue() JobNewClusterAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterClusterLogConfDbfsOutputReference
```
@@ -27787,7 +27896,7 @@ func InternalValue() JobNewClusterClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterClusterLogConfOutputReference
```
@@ -28120,7 +28229,7 @@ func InternalValue() JobNewClusterClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterClusterLogConfS3OutputReference
```
@@ -28565,7 +28674,7 @@ func InternalValue() JobNewClusterClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobNewClusterClusterMountInfoList
```
@@ -28606,6 +28715,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -28613,6 +28723,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -28708,7 +28834,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -29008,7 +29134,7 @@ func InternalValue() JobNewClusterClusterMountInfoNetworkFilesystemInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobNewClusterClusterMountInfoOutputReference
```
@@ -29361,7 +29487,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterDockerImageBasicAuthOutputReference
```
@@ -29654,7 +29780,7 @@ func InternalValue() JobNewClusterDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterDockerImageOutputReference
```
@@ -29967,7 +30093,7 @@ func InternalValue() JobNewClusterDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterGcpAttributesOutputReference
```
@@ -30390,7 +30516,7 @@ func InternalValue() JobNewClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsAbfssOutputReference
```
@@ -30668,7 +30794,7 @@ func InternalValue() JobNewClusterInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsDbfsOutputReference
```
@@ -30939,7 +31065,7 @@ func InternalValue() JobNewClusterInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsFileOutputReference
```
@@ -31217,7 +31343,7 @@ func InternalValue() JobNewClusterInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsGcsOutputReference
```
@@ -31495,7 +31621,7 @@ func InternalValue() JobNewClusterInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobNewClusterInitScriptsList
```
@@ -31536,6 +31662,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -31543,6 +31670,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -31638,7 +31781,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobNewClusterInitScriptsOutputReference
```
@@ -32199,7 +32342,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsS3OutputReference
```
@@ -32644,7 +32787,7 @@ func InternalValue() JobNewClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsVolumesOutputReference
```
@@ -32922,7 +33065,7 @@ func InternalValue() JobNewClusterInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterInitScriptsWorkspaceOutputReference
```
@@ -33200,7 +33343,7 @@ func InternalValue() JobNewClusterInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterOutputReference
```
@@ -34429,7 +34572,7 @@ func InternalValue() JobNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterWorkloadTypeClientsOutputReference
```
@@ -34736,7 +34879,7 @@ func InternalValue() JobNewClusterWorkloadTypeClients
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNewClusterWorkloadTypeOutputReference
```
@@ -35020,7 +35163,7 @@ func InternalValue() JobNewClusterWorkloadType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNotebookTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNotebookTaskOutputReference
```
@@ -35349,7 +35492,7 @@ func InternalValue() JobNotebookTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobNotificationSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobNotificationSettingsOutputReference
```
@@ -35656,7 +35799,7 @@ func InternalValue() JobNotificationSettings
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobParameterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobParameterList
```
@@ -35697,6 +35840,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -35704,6 +35848,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -35799,7 +35959,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobParameterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobParameterOutputReference
```
@@ -36124,7 +36284,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobPipelineTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobPipelineTaskOutputReference
```
@@ -36424,7 +36584,7 @@ func InternalValue() JobPipelineTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobPythonWheelTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobPythonWheelTaskOutputReference
```
@@ -36789,7 +36949,7 @@ func InternalValue() JobPythonWheelTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobQueueOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobQueueOutputReference
```
@@ -37060,7 +37220,7 @@ func InternalValue() JobQueue
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobRunAsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobRunAsOutputReference
```
@@ -37367,7 +37527,7 @@ func InternalValue() JobRunAs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobRunJobTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobRunJobTaskOutputReference
```
@@ -37667,7 +37827,7 @@ func InternalValue() JobRunJobTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobScheduleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobScheduleOutputReference
```
@@ -37989,7 +38149,7 @@ func InternalValue() JobSchedule
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobSparkJarTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobSparkJarTaskOutputReference
```
@@ -38325,7 +38485,7 @@ func InternalValue() JobSparkJarTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobSparkPythonTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobSparkPythonTaskOutputReference
```
@@ -38654,7 +38814,7 @@ func InternalValue() JobSparkPythonTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobSparkSubmitTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobSparkSubmitTaskOutputReference
```
@@ -38932,7 +39092,7 @@ func InternalValue() JobSparkSubmitTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskConditionTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskConditionTaskOutputReference
```
@@ -39268,7 +39428,7 @@ func InternalValue() JobTaskConditionTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskDbtTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskDbtTaskOutputReference
```
@@ -39684,7 +39844,7 @@ func InternalValue() JobTaskDbtTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskDependsOnList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskDependsOnList
```
@@ -39725,6 +39885,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -39732,6 +39893,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -39827,7 +40004,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskDependsOnOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskDependsOnOutputReference
```
@@ -40145,7 +40322,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskEmailNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskEmailNotificationsOutputReference
```
@@ -40510,7 +40687,7 @@ func InternalValue() JobTaskEmailNotifications
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskHealthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskHealthOutputReference
```
@@ -40794,7 +40971,7 @@ func InternalValue() JobTaskHealth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskHealthRulesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskHealthRulesList
```
@@ -40835,6 +41012,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -40842,6 +41020,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -40937,7 +41131,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskHealthRulesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskHealthRulesOutputReference
```
@@ -41291,7 +41485,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskLibraryCranOutputReference
```
@@ -41591,7 +41785,7 @@ func InternalValue() JobTaskLibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskLibraryList
```
@@ -41632,6 +41826,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -41639,6 +41834,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -41734,7 +41945,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskLibraryMavenOutputReference
```
@@ -42063,7 +42274,7 @@ func InternalValue() JobTaskLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskLibraryOutputReference
```
@@ -42543,7 +42754,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskLibraryPypiOutputReference
```
@@ -42843,7 +43054,7 @@ func InternalValue() JobTaskLibraryPypi
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskList
```
@@ -42884,6 +43095,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -42891,6 +43103,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -42986,7 +43214,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterAutoscaleOutputReference
```
@@ -43293,7 +43521,7 @@ func InternalValue() JobTaskNewClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterAwsAttributesOutputReference
```
@@ -43774,7 +44002,7 @@ func InternalValue() JobTaskNewClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterAzureAttributesOutputReference
```
@@ -44110,7 +44338,7 @@ func InternalValue() JobTaskNewClusterAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterClusterLogConfDbfsOutputReference
```
@@ -44381,7 +44609,7 @@ func InternalValue() JobTaskNewClusterClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterClusterLogConfOutputReference
```
@@ -44714,7 +44942,7 @@ func InternalValue() JobTaskNewClusterClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterClusterLogConfS3OutputReference
```
@@ -45159,7 +45387,7 @@ func InternalValue() JobTaskNewClusterClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterMountInfoList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskNewClusterClusterMountInfoList
```
@@ -45200,6 +45428,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -45207,6 +45436,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -45302,7 +45547,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference
```
@@ -45602,7 +45847,7 @@ func InternalValue() JobTaskNewClusterClusterMountInfoNetworkFilesystemInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterClusterMountInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskNewClusterClusterMountInfoOutputReference
```
@@ -45955,7 +46200,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterDockerImageBasicAuthOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterDockerImageBasicAuthOutputReference
```
@@ -46248,7 +46493,7 @@ func InternalValue() JobTaskNewClusterDockerImageBasicAuth
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterDockerImageOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterDockerImageOutputReference
```
@@ -46561,7 +46806,7 @@ func InternalValue() JobTaskNewClusterDockerImage
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterGcpAttributesOutputReference
```
@@ -46984,7 +47229,7 @@ func InternalValue() JobTaskNewClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsAbfssOutputReference
```
@@ -47262,7 +47507,7 @@ func InternalValue() JobTaskNewClusterInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsDbfsOutputReference
```
@@ -47533,7 +47778,7 @@ func InternalValue() JobTaskNewClusterInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsFileOutputReference
```
@@ -47811,7 +48056,7 @@ func InternalValue() JobTaskNewClusterInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsGcsOutputReference
```
@@ -48089,7 +48334,7 @@ func InternalValue() JobTaskNewClusterInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskNewClusterInitScriptsList
```
@@ -48130,6 +48375,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -48137,6 +48383,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -48232,7 +48494,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskNewClusterInitScriptsOutputReference
```
@@ -48793,7 +49055,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsS3OutputReference
```
@@ -49238,7 +49500,7 @@ func InternalValue() JobTaskNewClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsVolumesOutputReference
```
@@ -49516,7 +49778,7 @@ func InternalValue() JobTaskNewClusterInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterInitScriptsWorkspaceOutputReference
```
@@ -49794,7 +50056,7 @@ func InternalValue() JobTaskNewClusterInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterOutputReference
```
@@ -51023,7 +51285,7 @@ func InternalValue() JobTaskNewCluster
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterWorkloadTypeClientsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterWorkloadTypeClientsOutputReference
```
@@ -51330,7 +51592,7 @@ func InternalValue() JobTaskNewClusterWorkloadTypeClients
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNewClusterWorkloadTypeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNewClusterWorkloadTypeOutputReference
```
@@ -51614,7 +51876,7 @@ func InternalValue() JobTaskNewClusterWorkloadType
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNotebookTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNotebookTaskOutputReference
```
@@ -51943,7 +52205,7 @@ func InternalValue() JobTaskNotebookTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskNotificationSettingsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskNotificationSettingsOutputReference
```
@@ -52279,7 +52541,7 @@ func InternalValue() JobTaskNotificationSettings
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskOutputReference
```
@@ -53550,7 +53812,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskPipelineTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskPipelineTaskOutputReference
```
@@ -53850,7 +54112,7 @@ func InternalValue() JobTaskPipelineTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskPythonWheelTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskPythonWheelTaskOutputReference
```
@@ -54215,7 +54477,7 @@ func InternalValue() JobTaskPythonWheelTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskRunJobTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskRunJobTaskOutputReference
```
@@ -54515,7 +54777,7 @@ func InternalValue() JobTaskRunJobTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSparkJarTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSparkJarTaskOutputReference
```
@@ -54851,7 +55113,7 @@ func InternalValue() JobTaskSparkJarTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSparkPythonTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSparkPythonTaskOutputReference
```
@@ -55180,7 +55442,7 @@ func InternalValue() JobTaskSparkPythonTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSparkSubmitTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSparkSubmitTaskOutputReference
```
@@ -55458,7 +55720,7 @@ func InternalValue() JobTaskSparkSubmitTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskAlertOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSqlTaskAlertOutputReference
```
@@ -55793,7 +56055,7 @@ func InternalValue() JobTaskSqlTaskAlert
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskAlertSubscriptionsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskSqlTaskAlertSubscriptionsList
```
@@ -55834,6 +56096,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -55841,6 +56104,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -55936,7 +56215,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskAlertSubscriptionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskSqlTaskAlertSubscriptionsOutputReference
```
@@ -56261,7 +56540,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskDashboardOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSqlTaskDashboardOutputReference
```
@@ -56632,7 +56911,7 @@ func InternalValue() JobTaskSqlTaskDashboard
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskDashboardSubscriptionsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskSqlTaskDashboardSubscriptionsList
```
@@ -56673,6 +56952,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -56680,6 +56960,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -56775,7 +57071,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskDashboardSubscriptionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskSqlTaskDashboardSubscriptionsOutputReference
```
@@ -57100,7 +57396,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSqlTaskFileOutputReference
```
@@ -57371,7 +57667,7 @@ func InternalValue() JobTaskSqlTaskFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSqlTaskOutputReference
```
@@ -57846,7 +58142,7 @@ func InternalValue() JobTaskSqlTask
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskSqlTaskQueryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskSqlTaskQueryOutputReference
```
@@ -58117,7 +58413,7 @@ func InternalValue() JobTaskSqlTaskQuery
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnDurationWarningThresholdExceededList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskWebhookNotificationsOnDurationWarningThresholdExceededList
```
@@ -58158,6 +58454,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58165,6 +58462,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -58260,7 +58573,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference
```
@@ -58556,7 +58869,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnFailureList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskWebhookNotificationsOnFailureList
```
@@ -58597,6 +58910,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -58604,6 +58918,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -58699,7 +59029,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnFailureOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskWebhookNotificationsOnFailureOutputReference
```
@@ -58995,7 +59325,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnStartList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskWebhookNotificationsOnStartList
```
@@ -59036,6 +59366,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59043,6 +59374,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -59138,7 +59485,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnStartOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskWebhookNotificationsOnStartOutputReference
```
@@ -59434,7 +59781,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnSuccessList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobTaskWebhookNotificationsOnSuccessList
```
@@ -59475,6 +59822,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -59482,6 +59830,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -59577,7 +59941,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOnSuccessOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobTaskWebhookNotificationsOnSuccessOutputReference
```
@@ -59873,7 +60237,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTaskWebhookNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTaskWebhookNotificationsOutputReference
```
@@ -60290,7 +60654,7 @@ func InternalValue() JobTaskWebhookNotifications
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTimeoutsOutputReference
```
@@ -60597,7 +60961,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTriggerFileArrivalOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTriggerFileArrivalOutputReference
```
@@ -60926,7 +61290,7 @@ func InternalValue() JobTriggerFileArrival
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobTriggerOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobTriggerOutputReference
```
@@ -61239,7 +61603,7 @@ func InternalValue() JobTrigger
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnDurationWarningThresholdExceededList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobWebhookNotificationsOnDurationWarningThresholdExceededList
```
@@ -61280,6 +61644,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61287,6 +61652,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -61382,7 +61763,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnDurationWarningThresholdExceededOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobWebhookNotificationsOnDurationWarningThresholdExceededOutputReference
```
@@ -61678,7 +62059,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnFailureList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobWebhookNotificationsOnFailureList
```
@@ -61719,6 +62100,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -61726,6 +62108,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -61821,7 +62219,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnFailureOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobWebhookNotificationsOnFailureOutputReference
```
@@ -62117,7 +62515,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnStartList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobWebhookNotificationsOnStartList
```
@@ -62158,6 +62556,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62165,6 +62564,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -62260,7 +62675,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnStartOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobWebhookNotificationsOnStartOutputReference
```
@@ -62556,7 +62971,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnSuccessList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) JobWebhookNotificationsOnSuccessList
```
@@ -62597,6 +63012,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -62604,6 +63020,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -62699,7 +63131,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOnSuccessOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) JobWebhookNotificationsOnSuccessOutputReference
```
@@ -62995,7 +63427,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/job"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/job"
job.NewJobWebhookNotificationsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) JobWebhookNotificationsOutputReference
```
diff --git a/docs/job.java.md b/docs/job.java.md
index c09505dc1..b494e4094 100644
--- a/docs/job.java.md
+++ b/docs/job.java.md
@@ -571,6 +571,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -708,6 +709,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -12680,6 +12687,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -12687,6 +12695,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -15862,6 +15886,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -15869,6 +15894,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -16359,6 +16400,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -16366,6 +16408,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -18675,6 +18733,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -18682,6 +18741,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -21605,6 +21680,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -21612,6 +21688,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -25768,6 +25860,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -25775,6 +25868,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -29193,6 +29302,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -29200,6 +29310,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -32123,6 +32249,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -32130,6 +32257,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -36284,6 +36427,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -36291,6 +36435,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -40312,6 +40472,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -40319,6 +40480,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -41422,6 +41599,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -41429,6 +41607,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -42219,6 +42413,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -42226,6 +42421,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -43471,6 +43682,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -43478,6 +43690,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -45787,6 +46015,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -45794,6 +46023,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -48717,6 +48962,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -48724,6 +48970,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -56421,6 +56683,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -56428,6 +56691,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -57260,6 +57539,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -57267,6 +57547,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -58745,6 +59041,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58752,6 +59049,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -59184,6 +59497,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -59191,6 +59505,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -59623,6 +59953,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -59630,6 +59961,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -60062,6 +60409,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -60069,6 +60417,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -61867,6 +62231,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61874,6 +62239,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -62306,6 +62687,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -62313,6 +62695,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -62745,6 +63143,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -62752,6 +63151,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -63184,6 +63599,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -63191,6 +63607,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/job.python.md b/docs/job.python.md
index 413e39891..8f3608f31 100644
--- a/docs/job.python.md
+++ b/docs/job.python.md
@@ -561,6 +561,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -703,6 +704,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -13354,6 +13361,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -13361,6 +13369,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -16799,6 +16825,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -16806,6 +16833,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -17331,6 +17376,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -17338,6 +17384,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -19867,6 +19931,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -19874,6 +19939,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -23058,6 +23141,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -23065,6 +23149,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -28029,6 +28131,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -28036,6 +28139,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -31799,6 +31920,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -31806,6 +31928,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -34990,6 +35130,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -34997,6 +35138,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -39674,6 +39833,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -39681,6 +39841,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -44012,6 +44190,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -44019,6 +44198,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -45209,6 +45406,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -45216,6 +45414,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -46066,6 +46282,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -46073,6 +46290,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -47451,6 +47686,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -47458,6 +47694,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -49987,6 +50241,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -49994,6 +50249,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -53178,6 +53451,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -53185,6 +53459,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -62249,6 +62541,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -62256,6 +62549,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -63150,6 +63461,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -63157,6 +63469,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -64810,6 +65140,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -64817,6 +65148,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -65284,6 +65633,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -65291,6 +65641,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -65758,6 +66126,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -65765,6 +66134,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -66232,6 +66619,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -66239,6 +66627,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -68202,6 +68608,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -68209,6 +68616,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -68676,6 +69101,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -68683,6 +69109,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -69150,6 +69594,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -69157,6 +69602,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -69624,6 +70087,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -69631,6 +70095,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/job.typescript.md b/docs/job.typescript.md
index e6900d7f6..daffdaeed 100644
--- a/docs/job.typescript.md
+++ b/docs/job.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -191,6 +192,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -11445,6 +11452,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11452,6 +11460,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -14627,6 +14651,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -14634,6 +14659,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -15124,6 +15165,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -15131,6 +15173,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -17440,6 +17498,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -17447,6 +17506,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -20370,6 +20445,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -20377,6 +20453,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -24533,6 +24625,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -24540,6 +24633,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -27958,6 +28067,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -27965,6 +28075,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -30888,6 +31014,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -30895,6 +31022,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -35049,6 +35192,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -35056,6 +35200,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -39077,6 +39237,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -39084,6 +39245,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -40187,6 +40364,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -40194,6 +40372,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -40984,6 +41178,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -40991,6 +41186,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -42236,6 +42447,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -42243,6 +42455,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -44552,6 +44780,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -44559,6 +44788,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -47482,6 +47727,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -47489,6 +47735,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -55186,6 +55448,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -55193,6 +55456,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -56025,6 +56304,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -56032,6 +56312,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -57510,6 +57806,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -57517,6 +57814,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -57949,6 +58262,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -57956,6 +58270,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -58388,6 +58718,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58395,6 +58726,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -58827,6 +59174,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -58834,6 +59182,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -60632,6 +60996,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -60639,6 +61004,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -61071,6 +61452,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61078,6 +61460,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -61510,6 +61908,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61517,6 +61916,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -61949,6 +62364,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -61956,6 +62372,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/library.csharp.md b/docs/library.csharp.md
index c4b55de68..68cfd367c 100644
--- a/docs/library.csharp.md
+++ b/docs/library.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/library.go.md b/docs/library.go.md
index 7a4968ff0..2ade3aaa8 100644
--- a/docs/library.go.md
+++ b/docs/library.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.NewLibrary(scope Construct, id *string, config LibraryConfig) Library
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -459,7 +466,7 @@ func ResetWhl()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.Library_IsConstruct(x interface{}) *bool
```
@@ -491,7 +498,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.Library_IsTerraformElement(x interface{}) *bool
```
@@ -505,7 +512,7 @@ library.Library_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.Library_IsTerraformResource(x interface{}) *bool
```
@@ -519,7 +526,7 @@ library.Library_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.Library_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -924,7 +931,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
&library.LibraryConfig {
Connection: interface{},
@@ -935,12 +942,12 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
ClusterId: *string,
- Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.library.LibraryCran,
+ Cran: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.library.LibraryCran,
Egg: *string,
Id: *string,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.library.LibraryMaven,
- Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.library.LibraryPypi,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.library.LibraryMaven,
+ Pypi: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.library.LibraryPypi,
Whl: *string,
}
```
@@ -1147,7 +1154,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
&library.LibraryCran {
Package: *string,
@@ -1193,7 +1200,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
&library.LibraryMaven {
Coordinates: *string,
@@ -1253,7 +1260,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
&library.LibraryPypi {
Package: *string,
@@ -1301,7 +1308,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.NewLibraryCranOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) LibraryCranOutputReference
```
@@ -1601,7 +1608,7 @@ func InternalValue() LibraryCran
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.NewLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) LibraryMavenOutputReference
```
@@ -1930,7 +1937,7 @@ func InternalValue() LibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/library"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/library"
library.NewLibraryPypiOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) LibraryPypiOutputReference
```
diff --git a/docs/library.java.md b/docs/library.java.md
index 8359e3773..f0988cb0d 100644
--- a/docs/library.java.md
+++ b/docs/library.java.md
@@ -197,6 +197,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -278,6 +279,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/library.python.md b/docs/library.python.md
index a7ad28ec7..84dfa146d 100644
--- a/docs/library.python.md
+++ b/docs/library.python.md
@@ -195,6 +195,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -281,6 +282,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/library.typescript.md b/docs/library.typescript.md
index b71a5127e..48c9dae74 100644
--- a/docs/library.typescript.md
+++ b/docs/library.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/metastore.csharp.md b/docs/metastore.csharp.md
index a6841e8d2..efc0d820b 100644
--- a/docs/metastore.csharp.md
+++ b/docs/metastore.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/metastore.go.md b/docs/metastore.go.md
index 282a51b42..5999e6586 100644
--- a/docs/metastore.go.md
+++ b/docs/metastore.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
metastore.NewMetastore(scope Construct, id *string, config MetastoreConfig) Metastore
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -490,7 +497,7 @@ func ResetUpdatedBy()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
metastore.Metastore_IsConstruct(x interface{}) *bool
```
@@ -522,7 +529,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
metastore.Metastore_IsTerraformElement(x interface{}) *bool
```
@@ -536,7 +543,7 @@ metastore.Metastore_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
metastore.Metastore_IsTerraformResource(x interface{}) *bool
```
@@ -550,7 +557,7 @@ metastore.Metastore_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
metastore.Metastore_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1175,7 +1182,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastore"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastore"
&metastore.MetastoreConfig {
Connection: interface{},
diff --git a/docs/metastore.java.md b/docs/metastore.java.md
index 449b37d17..4a585189f 100644
--- a/docs/metastore.java.md
+++ b/docs/metastore.java.md
@@ -292,6 +292,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -380,6 +381,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/metastore.python.md b/docs/metastore.python.md
index 9aef7ce46..bdf493594 100644
--- a/docs/metastore.python.md
+++ b/docs/metastore.python.md
@@ -289,6 +289,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -382,6 +383,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/metastore.typescript.md b/docs/metastore.typescript.md
index d1c56558d..139bf8c01 100644
--- a/docs/metastore.typescript.md
+++ b/docs/metastore.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/metastoreAssignment.csharp.md b/docs/metastoreAssignment.csharp.md
index 66540145a..1ae0146ba 100644
--- a/docs/metastoreAssignment.csharp.md
+++ b/docs/metastoreAssignment.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/metastoreAssignment.go.md b/docs/metastoreAssignment.go.md
index d495a3e54..663d605ff 100644
--- a/docs/metastoreAssignment.go.md
+++ b/docs/metastoreAssignment.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
metastoreassignment.NewMetastoreAssignment(scope Construct, id *string, config MetastoreAssignmentConfig) MetastoreAssignment
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -385,7 +392,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
metastoreassignment.MetastoreAssignment_IsConstruct(x interface{}) *bool
```
@@ -417,7 +424,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
metastoreassignment.MetastoreAssignment_IsTerraformElement(x interface{}) *bool
```
@@ -431,7 +438,7 @@ metastoreassignment.MetastoreAssignment_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
metastoreassignment.MetastoreAssignment_IsTerraformResource(x interface{}) *bool
```
@@ -445,7 +452,7 @@ metastoreassignment.MetastoreAssignment_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
metastoreassignment.MetastoreAssignment_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -762,7 +769,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoreassignment"
&metastoreassignment.MetastoreAssignmentConfig {
Connection: interface{},
diff --git a/docs/metastoreAssignment.java.md b/docs/metastoreAssignment.java.md
index d519651ca..f2a849be2 100644
--- a/docs/metastoreAssignment.java.md
+++ b/docs/metastoreAssignment.java.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -224,6 +225,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/metastoreAssignment.python.md b/docs/metastoreAssignment.python.md
index ad3002e1a..2b68efc6c 100644
--- a/docs/metastoreAssignment.python.md
+++ b/docs/metastoreAssignment.python.md
@@ -149,6 +149,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -227,6 +228,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/metastoreAssignment.typescript.md b/docs/metastoreAssignment.typescript.md
index 7d3e48767..5a89a5e33 100644
--- a/docs/metastoreAssignment.typescript.md
+++ b/docs/metastoreAssignment.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/metastoreDataAccess.csharp.md b/docs/metastoreDataAccess.csharp.md
index b8abf3663..49118721c 100644
--- a/docs/metastoreDataAccess.csharp.md
+++ b/docs/metastoreDataAccess.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -143,6 +144,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/metastoreDataAccess.go.md b/docs/metastoreDataAccess.go.md
index c6132af9b..a712394f7 100644
--- a/docs/metastoreDataAccess.go.md
+++ b/docs/metastoreDataAccess.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccess(scope Construct, id *string, config MetastoreDataAccessConfig) MetastoreDataAccess
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -143,6 +144,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -527,7 +534,7 @@ func ResetReadOnly()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.MetastoreDataAccess_IsConstruct(x interface{}) *bool
```
@@ -559,7 +566,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.MetastoreDataAccess_IsTerraformElement(x interface{}) *bool
```
@@ -573,7 +580,7 @@ metastoredataaccess.MetastoreDataAccess_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.MetastoreDataAccess_IsTerraformResource(x interface{}) *bool
```
@@ -587,7 +594,7 @@ metastoredataaccess.MetastoreDataAccess_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.MetastoreDataAccess_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1124,7 +1131,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessAwsIamRole {
RoleArn: *string,
@@ -1184,7 +1191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessAzureManagedIdentity {
AccessConnectorId: *string,
@@ -1244,7 +1251,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessAzureServicePrincipal {
ApplicationId: *string,
@@ -1304,7 +1311,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessConfig {
Connection: interface{},
@@ -1315,14 +1322,14 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoreda
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
Name: *string,
- AwsIamRole: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.metastoreDataAccess.MetastoreDataAccessAwsIamRole,
- AzureManagedIdentity: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.metastoreDataAccess.MetastoreDataAccessAzureManagedIdentity,
- AzureServicePrincipal: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.metastoreDataAccess.MetastoreDataAccessAzureServicePrincipal,
+ AwsIamRole: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.metastoreDataAccess.MetastoreDataAccessAwsIamRole,
+ AzureManagedIdentity: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.metastoreDataAccess.MetastoreDataAccessAzureManagedIdentity,
+ AzureServicePrincipal: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.metastoreDataAccess.MetastoreDataAccessAzureServicePrincipal,
Comment: *string,
- DatabricksGcpServiceAccount: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.metastoreDataAccess.MetastoreDataAccessDatabricksGcpServiceAccount,
+ DatabricksGcpServiceAccount: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.metastoreDataAccess.MetastoreDataAccessDatabricksGcpServiceAccount,
ForceDestroy: interface{},
ForceUpdate: interface{},
- GcpServiceAccountKey: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.metastoreDataAccess.MetastoreDataAccessGcpServiceAccountKey,
+ GcpServiceAccountKey: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.metastoreDataAccess.MetastoreDataAccessGcpServiceAccountKey,
Id: *string,
IsDefault: interface{},
MetastoreId: *string,
@@ -1615,7 +1622,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessDatabricksGcpServiceAccount {
CredentialId: *string,
@@ -1661,7 +1668,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
&metastoredataaccess.MetastoreDataAccessGcpServiceAccountKey {
Email: *string,
@@ -1723,7 +1730,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccessAwsIamRoleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MetastoreDataAccessAwsIamRoleOutputReference
```
@@ -2052,7 +2059,7 @@ func InternalValue() MetastoreDataAccessAwsIamRole
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccessAzureManagedIdentityOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MetastoreDataAccessAzureManagedIdentityOutputReference
```
@@ -2381,7 +2388,7 @@ func InternalValue() MetastoreDataAccessAzureManagedIdentity
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccessAzureServicePrincipalOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MetastoreDataAccessAzureServicePrincipalOutputReference
```
@@ -2696,7 +2703,7 @@ func InternalValue() MetastoreDataAccessAzureServicePrincipal
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccessDatabricksGcpServiceAccountOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MetastoreDataAccessDatabricksGcpServiceAccountOutputReference
```
@@ -3003,7 +3010,7 @@ func InternalValue() MetastoreDataAccessDatabricksGcpServiceAccount
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/metastoredataaccess"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/metastoredataaccess"
metastoredataaccess.NewMetastoreDataAccessGcpServiceAccountKeyOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MetastoreDataAccessGcpServiceAccountKeyOutputReference
```
diff --git a/docs/metastoreDataAccess.java.md b/docs/metastoreDataAccess.java.md
index 9bcf7f0c1..89b476446 100644
--- a/docs/metastoreDataAccess.java.md
+++ b/docs/metastoreDataAccess.java.md
@@ -265,6 +265,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -354,6 +355,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/metastoreDataAccess.python.md b/docs/metastoreDataAccess.python.md
index 46457f680..09c3eb392 100644
--- a/docs/metastoreDataAccess.python.md
+++ b/docs/metastoreDataAccess.python.md
@@ -259,6 +259,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -353,6 +354,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/metastoreDataAccess.typescript.md b/docs/metastoreDataAccess.typescript.md
index f640b4291..9c42ae54a 100644
--- a/docs/metastoreDataAccess.typescript.md
+++ b/docs/metastoreDataAccess.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -143,6 +144,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mlflowExperiment.csharp.md b/docs/mlflowExperiment.csharp.md
index aa607cc8f..3a3c61929 100644
--- a/docs/mlflowExperiment.csharp.md
+++ b/docs/mlflowExperiment.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mlflowExperiment.go.md b/docs/mlflowExperiment.go.md
index 07482dc80..ee273ca23 100644
--- a/docs/mlflowExperiment.go.md
+++ b/docs/mlflowExperiment.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.NewMlflowExperiment(scope Construct, id *string, config MlflowExperimentConfig) MlflowExperiment
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -433,7 +440,7 @@ func ResetLifecycleStage()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.MlflowExperiment_IsConstruct(x interface{}) *bool
```
@@ -465,7 +472,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.MlflowExperiment_IsTerraformElement(x interface{}) *bool
```
@@ -479,7 +486,7 @@ mlflowexperiment.MlflowExperiment_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.MlflowExperiment_IsTerraformResource(x interface{}) *bool
```
@@ -493,7 +500,7 @@ mlflowexperiment.MlflowExperiment_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.MlflowExperiment_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -920,7 +927,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
&mlflowexperiment.MlflowExperimentConfig {
Connection: interface{},
@@ -938,7 +945,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexper
Id: *string,
LastUpdateTime: *f64,
LifecycleStage: *string,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mlflowExperiment.MlflowExperimentTimeouts,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mlflowExperiment.MlflowExperimentTimeouts,
}
```
@@ -1153,7 +1160,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
&mlflowexperiment.MlflowExperimentTimeouts {
@@ -1168,7 +1175,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexper
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowexperiment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowexperiment"
mlflowexperiment.NewMlflowExperimentTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MlflowExperimentTimeoutsOutputReference
```
diff --git a/docs/mlflowExperiment.java.md b/docs/mlflowExperiment.java.md
index 3ec8fd3ee..0f216d933 100644
--- a/docs/mlflowExperiment.java.md
+++ b/docs/mlflowExperiment.java.md
@@ -203,6 +203,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -282,6 +283,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mlflowExperiment.python.md b/docs/mlflowExperiment.python.md
index 2afbc41b9..c09a15f47 100644
--- a/docs/mlflowExperiment.python.md
+++ b/docs/mlflowExperiment.python.md
@@ -201,6 +201,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -285,6 +286,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mlflowExperiment.typescript.md b/docs/mlflowExperiment.typescript.md
index 05c9106b5..aa9230408 100644
--- a/docs/mlflowExperiment.typescript.md
+++ b/docs/mlflowExperiment.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mlflowModel.csharp.md b/docs/mlflowModel.csharp.md
index cc67b321d..d13bbe5f0 100644
--- a/docs/mlflowModel.csharp.md
+++ b/docs/mlflowModel.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1045,6 +1052,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1052,6 +1060,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/mlflowModel.go.md b/docs/mlflowModel.go.md
index 66dac984f..5b7ac92eb 100644
--- a/docs/mlflowModel.go.md
+++ b/docs/mlflowModel.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.NewMlflowModel(scope Construct, id *string, config MlflowModelConfig) MlflowModel
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -405,7 +412,7 @@ func ResetTags()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.MlflowModel_IsConstruct(x interface{}) *bool
```
@@ -437,7 +444,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.MlflowModel_IsTerraformElement(x interface{}) *bool
```
@@ -451,7 +458,7 @@ mlflowmodel.MlflowModel_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.MlflowModel_IsTerraformResource(x interface{}) *bool
```
@@ -465,7 +472,7 @@ mlflowmodel.MlflowModel_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.MlflowModel_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -793,7 +800,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
&mlflowmodel.MlflowModelConfig {
Connection: interface{},
@@ -956,7 +963,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
&mlflowmodel.MlflowModelTags {
Key: *string,
@@ -1004,7 +1011,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.NewMlflowModelTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) MlflowModelTagsList
```
@@ -1045,6 +1052,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1052,6 +1060,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1147,7 +1171,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowmodel"
mlflowmodel.NewMlflowModelTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) MlflowModelTagsOutputReference
```
diff --git a/docs/mlflowModel.java.md b/docs/mlflowModel.java.md
index f51db0bca..c3b36166f 100644
--- a/docs/mlflowModel.java.md
+++ b/docs/mlflowModel.java.md
@@ -154,6 +154,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -229,6 +230,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1152,6 +1159,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1159,6 +1167,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/mlflowModel.python.md b/docs/mlflowModel.python.md
index f382d929a..93f071dfb 100644
--- a/docs/mlflowModel.python.md
+++ b/docs/mlflowModel.python.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -231,6 +232,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1196,6 +1203,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1203,6 +1211,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/mlflowModel.typescript.md b/docs/mlflowModel.typescript.md
index 655dc752a..d89f27d90 100644
--- a/docs/mlflowModel.typescript.md
+++ b/docs/mlflowModel.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1030,6 +1037,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1037,6 +1045,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/mlflowWebhook.csharp.md b/docs/mlflowWebhook.csharp.md
index bd6133a37..845459bd4 100644
--- a/docs/mlflowWebhook.csharp.md
+++ b/docs/mlflowWebhook.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mlflowWebhook.go.md b/docs/mlflowWebhook.go.md
index a64ea2a4d..7f58119d4 100644
--- a/docs/mlflowWebhook.go.md
+++ b/docs/mlflowWebhook.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.NewMlflowWebhook(scope Construct, id *string, config MlflowWebhookConfig) MlflowWebhook
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -439,7 +446,7 @@ func ResetStatus()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.MlflowWebhook_IsConstruct(x interface{}) *bool
```
@@ -471,7 +478,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.MlflowWebhook_IsTerraformElement(x interface{}) *bool
```
@@ -485,7 +492,7 @@ mlflowwebhook.MlflowWebhook_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.MlflowWebhook_IsTerraformResource(x interface{}) *bool
```
@@ -499,7 +506,7 @@ mlflowwebhook.MlflowWebhook_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.MlflowWebhook_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -882,7 +889,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
&mlflowwebhook.MlflowWebhookConfig {
Connection: interface{},
@@ -894,9 +901,9 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebho
Provisioners: *[]interface{},
Events: *[]*string,
Description: *string,
- HttpUrlSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mlflowWebhook.MlflowWebhookHttpUrlSpec,
+ HttpUrlSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mlflowWebhook.MlflowWebhookHttpUrlSpec,
Id: *string,
- JobSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mlflowWebhook.MlflowWebhookJobSpec,
+ JobSpec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mlflowWebhook.MlflowWebhookJobSpec,
ModelName: *string,
Status: *string,
}
@@ -1089,7 +1096,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
&mlflowwebhook.MlflowWebhookHttpUrlSpec {
Url: *string,
@@ -1163,7 +1170,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
&mlflowwebhook.MlflowWebhookJobSpec {
AccessToken: *string,
@@ -1225,7 +1232,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.NewMlflowWebhookHttpUrlSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MlflowWebhookHttpUrlSpecOutputReference
```
@@ -1583,7 +1590,7 @@ func InternalValue() MlflowWebhookHttpUrlSpec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mlflowwebhook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mlflowwebhook"
mlflowwebhook.NewMlflowWebhookJobSpecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MlflowWebhookJobSpecOutputReference
```
diff --git a/docs/mlflowWebhook.java.md b/docs/mlflowWebhook.java.md
index 22cb08568..1d84c8ed1 100644
--- a/docs/mlflowWebhook.java.md
+++ b/docs/mlflowWebhook.java.md
@@ -185,6 +185,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -264,6 +265,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mlflowWebhook.python.md b/docs/mlflowWebhook.python.md
index 683a4f80f..48d3c9ce1 100644
--- a/docs/mlflowWebhook.python.md
+++ b/docs/mlflowWebhook.python.md
@@ -183,6 +183,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -267,6 +268,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mlflowWebhook.typescript.md b/docs/mlflowWebhook.typescript.md
index d68144c5a..c5a9cc612 100644
--- a/docs/mlflowWebhook.typescript.md
+++ b/docs/mlflowWebhook.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/modelServing.csharp.md b/docs/modelServing.csharp.md
index 4a23a1acf..ebadadb47 100644
--- a/docs/modelServing.csharp.md
+++ b/docs/modelServing.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -2365,6 +2372,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2372,6 +2380,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -3277,6 +3301,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3284,6 +3309,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -3731,6 +3772,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3738,6 +3780,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -4214,6 +4272,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4221,6 +4280,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/modelServing.go.md b/docs/modelServing.go.md
index 51eca5418..d09a6ecd2 100644
--- a/docs/modelServing.go.md
+++ b/docs/modelServing.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServing(scope Construct, id *string, config ModelServingConfig) ModelServing
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -451,7 +458,7 @@ func ResetTimeouts()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.ModelServing_IsConstruct(x interface{}) *bool
```
@@ -483,7 +490,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.ModelServing_IsTerraformElement(x interface{}) *bool
```
@@ -497,7 +504,7 @@ modelserving.ModelServing_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.ModelServing_IsTerraformResource(x interface{}) *bool
```
@@ -511,7 +518,7 @@ modelserving.ModelServing_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.ModelServing_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -883,7 +890,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfig {
Connection: interface{},
@@ -893,12 +900,12 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelservin
Lifecycle: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformResourceLifecycle,
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
- Config: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.modelServing.ModelServingConfigA,
+ Config: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.modelServing.ModelServingConfigA,
Name: *string,
Id: *string,
RateLimits: interface{},
Tags: interface{},
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.modelServing.ModelServingTimeouts,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.modelServing.ModelServingTimeouts,
}
```
@@ -1080,12 +1087,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfigA {
- AutoCaptureConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.modelServing.ModelServingConfigAutoCaptureConfig,
+ AutoCaptureConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.modelServing.ModelServingConfigAutoCaptureConfig,
ServedModels: interface{},
- TrafficConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.modelServing.ModelServingConfigTrafficConfig,
+ TrafficConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.modelServing.ModelServingConfigTrafficConfig,
}
```
@@ -1146,7 +1153,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfigAutoCaptureConfig {
CatalogName: *string,
@@ -1220,7 +1227,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfigServedModels {
ModelName: *string,
@@ -1350,7 +1357,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfigTrafficConfig {
Routes: interface{},
@@ -1384,7 +1391,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingConfigTrafficConfigRoutes {
ServedModelName: *string,
@@ -1430,7 +1437,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingRateLimits {
Calls: *f64,
@@ -1490,7 +1497,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingTags {
Key: *string,
@@ -1536,7 +1543,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
&modelserving.ModelServingTimeouts {
Create: *string,
@@ -1584,7 +1591,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigAOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ModelServingConfigAOutputReference
```
@@ -1959,7 +1966,7 @@ func InternalValue() ModelServingConfigA
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigAutoCaptureConfigOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ModelServingConfigAutoCaptureConfigOutputReference
```
@@ -2324,7 +2331,7 @@ func InternalValue() ModelServingConfigAutoCaptureConfig
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigServedModelsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ModelServingConfigServedModelsList
```
@@ -2365,6 +2372,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2372,6 +2380,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2467,7 +2491,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigServedModelsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ModelServingConfigServedModelsOutputReference
```
@@ -2945,7 +2969,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigTrafficConfigOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ModelServingConfigTrafficConfigOutputReference
```
@@ -3236,7 +3260,7 @@ func InternalValue() ModelServingConfigTrafficConfig
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigTrafficConfigRoutesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ModelServingConfigTrafficConfigRoutesList
```
@@ -3277,6 +3301,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3284,6 +3309,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -3379,7 +3420,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingConfigTrafficConfigRoutesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ModelServingConfigTrafficConfigRoutesOutputReference
```
@@ -3690,7 +3731,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingRateLimitsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ModelServingRateLimitsList
```
@@ -3731,6 +3772,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -3738,6 +3780,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -3833,7 +3891,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingRateLimitsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ModelServingRateLimitsOutputReference
```
@@ -4173,7 +4231,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ModelServingTagsList
```
@@ -4214,6 +4272,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -4221,6 +4280,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -4316,7 +4391,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ModelServingTagsOutputReference
```
@@ -4634,7 +4709,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/modelserving"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/modelserving"
modelserving.NewModelServingTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) ModelServingTimeoutsOutputReference
```
diff --git a/docs/modelServing.java.md b/docs/modelServing.java.md
index 4e9f87e67..6aef065b9 100644
--- a/docs/modelServing.java.md
+++ b/docs/modelServing.java.md
@@ -181,6 +181,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -260,6 +261,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -2504,6 +2511,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2511,6 +2519,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -3416,6 +3440,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -3423,6 +3448,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -3870,6 +3911,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -3877,6 +3919,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -4353,6 +4411,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4360,6 +4419,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/modelServing.python.md b/docs/modelServing.python.md
index 9b15f97de..2a17b29b1 100644
--- a/docs/modelServing.python.md
+++ b/docs/modelServing.python.md
@@ -177,6 +177,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -261,6 +262,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -2674,6 +2681,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2681,6 +2689,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -3648,6 +3674,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -3655,6 +3682,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -4137,6 +4182,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -4144,6 +4190,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -4655,6 +4719,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -4662,6 +4727,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/modelServing.typescript.md b/docs/modelServing.typescript.md
index f7b7cbe46..d4c91f8b9 100644
--- a/docs/modelServing.typescript.md
+++ b/docs/modelServing.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -2318,6 +2325,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2325,6 +2333,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -3230,6 +3254,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -3237,6 +3262,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -3684,6 +3725,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -3691,6 +3733,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -4167,6 +4225,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -4174,6 +4233,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/mount.csharp.md b/docs/mount.csharp.md
index e101153c6..1d87069b8 100644
--- a/docs/mount.csharp.md
+++ b/docs/mount.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -144,6 +145,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mount.go.md b/docs/mount.go.md
index 19f12e54d..0b979445f 100644
--- a/docs/mount.go.md
+++ b/docs/mount.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMount(scope Construct, id *string, config MountConfig) Mount
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -144,6 +145,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -540,7 +547,7 @@ func ResetWasb()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.Mount_IsConstruct(x interface{}) *bool
```
@@ -572,7 +579,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.Mount_IsTerraformElement(x interface{}) *bool
```
@@ -586,7 +593,7 @@ mount.Mount_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.Mount_IsTerraformResource(x interface{}) *bool
```
@@ -600,7 +607,7 @@ mount.Mount_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.Mount_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1126,7 +1133,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountAbfs {
ClientId: *string,
@@ -1256,7 +1263,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountAdl {
ClientId: *string,
@@ -1372,7 +1379,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountConfig {
Connection: interface{},
@@ -1382,19 +1389,19 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
Lifecycle: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformResourceLifecycle,
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
- Abfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountAbfs,
- Adl: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountAdl,
+ Abfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountAbfs,
+ Adl: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountAdl,
ClusterId: *string,
EncryptionType: *string,
ExtraConfigs: *map[string]*string,
- Gs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountGs,
+ Gs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountGs,
Id: *string,
Name: *string,
ResourceId: *string,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountS3,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountTimeouts,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountS3,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountTimeouts,
Uri: *string,
- Wasb: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mount.MountWasb,
+ Wasb: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mount.MountWasb,
}
```
@@ -1671,7 +1678,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountGs {
BucketName: *string,
@@ -1717,7 +1724,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountS3 {
BucketName: *string,
@@ -1763,7 +1770,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountTimeouts {
Default: *string,
@@ -1795,7 +1802,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
&mount.MountWasb {
AuthType: *string,
@@ -1899,7 +1906,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountAbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountAbfsOutputReference
```
@@ -2352,7 +2359,7 @@ func InternalValue() MountAbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountAdlOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountAdlOutputReference
```
@@ -2783,7 +2790,7 @@ func InternalValue() MountAdl
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountGsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountGsOutputReference
```
@@ -3083,7 +3090,7 @@ func InternalValue() MountGs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountS3OutputReference
```
@@ -3383,7 +3390,7 @@ func InternalValue() MountS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountTimeoutsOutputReference
```
@@ -3661,7 +3668,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mount"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mount"
mount.NewMountWasbOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MountWasbOutputReference
```
diff --git a/docs/mount.java.md b/docs/mount.java.md
index 2aac12bd6..3185bc4ed 100644
--- a/docs/mount.java.md
+++ b/docs/mount.java.md
@@ -253,6 +253,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -343,6 +344,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mount.python.md b/docs/mount.python.md
index de89c11a9..8d5dc5188 100644
--- a/docs/mount.python.md
+++ b/docs/mount.python.md
@@ -251,6 +251,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -346,6 +347,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mount.typescript.md b/docs/mount.typescript.md
index 81cb064d6..7514f7146 100644
--- a/docs/mount.typescript.md
+++ b/docs/mount.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -144,6 +145,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsCredentials.csharp.md b/docs/mwsCredentials.csharp.md
index 5c740b8b4..0168710ed 100644
--- a/docs/mwsCredentials.csharp.md
+++ b/docs/mwsCredentials.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsCredentials.go.md b/docs/mwsCredentials.go.md
index 59b5c5c15..604b51189 100644
--- a/docs/mwsCredentials.go.md
+++ b/docs/mwsCredentials.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
mwscredentials.NewMwsCredentials(scope Construct, id *string, config MwsCredentialsConfig) MwsCredentials
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -406,7 +413,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
mwscredentials.MwsCredentials_IsConstruct(x interface{}) *bool
```
@@ -438,7 +445,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
mwscredentials.MwsCredentials_IsTerraformElement(x interface{}) *bool
```
@@ -452,7 +459,7 @@ mwscredentials.MwsCredentials_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
mwscredentials.MwsCredentials_IsTerraformResource(x interface{}) *bool
```
@@ -466,7 +473,7 @@ mwscredentials.MwsCredentials_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
mwscredentials.MwsCredentials_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -849,7 +856,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscredentials"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscredentials"
&mwscredentials.MwsCredentialsConfig {
Connection: interface{},
diff --git a/docs/mwsCredentials.java.md b/docs/mwsCredentials.java.md
index 0d6053f39..d472223f4 100644
--- a/docs/mwsCredentials.java.md
+++ b/docs/mwsCredentials.java.md
@@ -181,6 +181,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -257,6 +258,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsCredentials.python.md b/docs/mwsCredentials.python.md
index 8ec9a6f97..583f46681 100644
--- a/docs/mwsCredentials.python.md
+++ b/docs/mwsCredentials.python.md
@@ -179,6 +179,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -260,6 +261,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsCredentials.typescript.md b/docs/mwsCredentials.typescript.md
index 816316053..d4f0dd228 100644
--- a/docs/mwsCredentials.typescript.md
+++ b/docs/mwsCredentials.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsCustomerManagedKeys.csharp.md b/docs/mwsCustomerManagedKeys.csharp.md
index 10ed68544..7c7701ff9 100644
--- a/docs/mwsCustomerManagedKeys.csharp.md
+++ b/docs/mwsCustomerManagedKeys.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsCustomerManagedKeys.go.md b/docs/mwsCustomerManagedKeys.go.md
index c66eba0ef..3e5b904d1 100644
--- a/docs/mwsCustomerManagedKeys.go.md
+++ b/docs/mwsCustomerManagedKeys.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.NewMwsCustomerManagedKeys(scope Construct, id *string, config MwsCustomerManagedKeysConfig) MwsCustomerManagedKeys
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -432,7 +439,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.MwsCustomerManagedKeys_IsConstruct(x interface{}) *bool
```
@@ -464,7 +471,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.MwsCustomerManagedKeys_IsTerraformElement(x interface{}) *bool
```
@@ -478,7 +485,7 @@ mwscustomermanagedkeys.MwsCustomerManagedKeys_IsTerraformElement(x interface{})
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.MwsCustomerManagedKeys_IsTerraformResource(x interface{}) *bool
```
@@ -492,7 +499,7 @@ mwscustomermanagedkeys.MwsCustomerManagedKeys_IsTerraformResource(x interface{})
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.MwsCustomerManagedKeys_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -875,7 +882,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
&mwscustomermanagedkeys.MwsCustomerManagedKeysAwsKeyInfo {
KeyArn: *string,
@@ -935,7 +942,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
&mwscustomermanagedkeys.MwsCustomerManagedKeysConfig {
Connection: interface{},
@@ -947,10 +954,10 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomer
Provisioners: *[]interface{},
AccountId: *string,
UseCases: *[]*string,
- AwsKeyInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsCustomerManagedKeys.MwsCustomerManagedKeysAwsKeyInfo,
+ AwsKeyInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsCustomerManagedKeys.MwsCustomerManagedKeysAwsKeyInfo,
CreationTime: *f64,
CustomerManagedKeyId: *string,
- GcpKeyInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsCustomerManagedKeys.MwsCustomerManagedKeysGcpKeyInfo,
+ GcpKeyInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsCustomerManagedKeys.MwsCustomerManagedKeysGcpKeyInfo,
Id: *string,
}
```
@@ -1142,7 +1149,7 @@ If you experience problems setting this value it might not be settable. Please t
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
&mwscustomermanagedkeys.MwsCustomerManagedKeysGcpKeyInfo {
KmsKeyId: *string,
@@ -1176,7 +1183,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.NewMwsCustomerManagedKeysAwsKeyInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsCustomerManagedKeysAwsKeyInfoOutputReference
```
@@ -1505,7 +1512,7 @@ func InternalValue() MwsCustomerManagedKeysAwsKeyInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwscustomermanagedkeys"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwscustomermanagedkeys"
mwscustomermanagedkeys.NewMwsCustomerManagedKeysGcpKeyInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsCustomerManagedKeysGcpKeyInfoOutputReference
```
diff --git a/docs/mwsCustomerManagedKeys.java.md b/docs/mwsCustomerManagedKeys.java.md
index eb14db59c..cd16ca012 100644
--- a/docs/mwsCustomerManagedKeys.java.md
+++ b/docs/mwsCustomerManagedKeys.java.md
@@ -185,6 +185,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -263,6 +264,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsCustomerManagedKeys.python.md b/docs/mwsCustomerManagedKeys.python.md
index bc11d5220..23e05cae1 100644
--- a/docs/mwsCustomerManagedKeys.python.md
+++ b/docs/mwsCustomerManagedKeys.python.md
@@ -183,6 +183,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -266,6 +267,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsCustomerManagedKeys.typescript.md b/docs/mwsCustomerManagedKeys.typescript.md
index 06e19c0d9..099cf3d68 100644
--- a/docs/mwsCustomerManagedKeys.typescript.md
+++ b/docs/mwsCustomerManagedKeys.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsLogDelivery.csharp.md b/docs/mwsLogDelivery.csharp.md
index 665718a26..06f691261 100644
--- a/docs/mwsLogDelivery.csharp.md
+++ b/docs/mwsLogDelivery.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsLogDelivery.go.md b/docs/mwsLogDelivery.go.md
index 23ce1610e..4c20dbaac 100644
--- a/docs/mwsLogDelivery.go.md
+++ b/docs/mwsLogDelivery.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
mwslogdelivery.NewMwsLogDelivery(scope Construct, id *string, config MwsLogDeliveryConfig) MwsLogDelivery
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -420,7 +427,7 @@ func ResetWorkspaceIdsFilter()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
mwslogdelivery.MwsLogDelivery_IsConstruct(x interface{}) *bool
```
@@ -452,7 +459,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
mwslogdelivery.MwsLogDelivery_IsTerraformElement(x interface{}) *bool
```
@@ -466,7 +473,7 @@ mwslogdelivery.MwsLogDelivery_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
mwslogdelivery.MwsLogDelivery_IsTerraformResource(x interface{}) *bool
```
@@ -480,7 +487,7 @@ mwslogdelivery.MwsLogDelivery_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
mwslogdelivery.MwsLogDelivery_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -973,7 +980,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwslogdelivery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwslogdelivery"
&mwslogdelivery.MwsLogDeliveryConfig {
Connection: interface{},
diff --git a/docs/mwsLogDelivery.java.md b/docs/mwsLogDelivery.java.md
index abf882d74..ae06073ee 100644
--- a/docs/mwsLogDelivery.java.md
+++ b/docs/mwsLogDelivery.java.md
@@ -231,6 +231,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -309,6 +310,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsLogDelivery.python.md b/docs/mwsLogDelivery.python.md
index d7a48d693..3fd5c33bd 100644
--- a/docs/mwsLogDelivery.python.md
+++ b/docs/mwsLogDelivery.python.md
@@ -229,6 +229,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -312,6 +313,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsLogDelivery.typescript.md b/docs/mwsLogDelivery.typescript.md
index 661799f25..4d30246fe 100644
--- a/docs/mwsLogDelivery.typescript.md
+++ b/docs/mwsLogDelivery.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsNetworks.csharp.md b/docs/mwsNetworks.csharp.md
index 10b3e2ade..6881b784b 100644
--- a/docs/mwsNetworks.csharp.md
+++ b/docs/mwsNetworks.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1592,6 +1599,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1599,6 +1607,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/mwsNetworks.go.md b/docs/mwsNetworks.go.md
index 8448d5b99..d92139114 100644
--- a/docs/mwsNetworks.go.md
+++ b/docs/mwsNetworks.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.NewMwsNetworks(scope Construct, id *string, config MwsNetworksConfig) MwsNetworks
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -487,7 +494,7 @@ func ResetWorkspaceId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.MwsNetworks_IsConstruct(x interface{}) *bool
```
@@ -519,7 +526,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.MwsNetworks_IsTerraformElement(x interface{}) *bool
```
@@ -533,7 +540,7 @@ mwsnetworks.MwsNetworks_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.MwsNetworks_IsTerraformResource(x interface{}) *bool
```
@@ -547,7 +554,7 @@ mwsnetworks.MwsNetworks_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.MwsNetworks_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1062,7 +1069,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
&mwsnetworks.MwsNetworksConfig {
Connection: interface{},
@@ -1076,12 +1083,12 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks
NetworkName: *string,
CreationTime: *f64,
ErrorMessages: interface{},
- GcpNetworkInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsNetworks.MwsNetworksGcpNetworkInfo,
+ GcpNetworkInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsNetworks.MwsNetworksGcpNetworkInfo,
Id: *string,
NetworkId: *string,
SecurityGroupIds: *[]*string,
SubnetIds: *[]*string,
- VpcEndpoints: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsNetworks.MwsNetworksVpcEndpoints,
+ VpcEndpoints: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsNetworks.MwsNetworksVpcEndpoints,
VpcId: *string,
VpcStatus: *string,
WorkspaceId: *f64,
@@ -1355,7 +1362,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
&mwsnetworks.MwsNetworksErrorMessages {
ErrorMessage: *string,
@@ -1401,7 +1408,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
&mwsnetworks.MwsNetworksGcpNetworkInfo {
NetworkProjectId: *string,
@@ -1503,7 +1510,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
&mwsnetworks.MwsNetworksVpcEndpoints {
DataplaneRelay: *[]*string,
@@ -1551,7 +1558,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.NewMwsNetworksErrorMessagesList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) MwsNetworksErrorMessagesList
```
@@ -1592,6 +1599,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1599,6 +1607,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1694,7 +1718,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.NewMwsNetworksErrorMessagesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) MwsNetworksErrorMessagesOutputReference
```
@@ -2019,7 +2043,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.NewMwsNetworksGcpNetworkInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsNetworksGcpNetworkInfoOutputReference
```
@@ -2400,7 +2424,7 @@ func InternalValue() MwsNetworksGcpNetworkInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsnetworks"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsnetworks"
mwsnetworks.NewMwsNetworksVpcEndpointsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsNetworksVpcEndpointsOutputReference
```
diff --git a/docs/mwsNetworks.java.md b/docs/mwsNetworks.java.md
index 411e25c83..a6f456056 100644
--- a/docs/mwsNetworks.java.md
+++ b/docs/mwsNetworks.java.md
@@ -248,6 +248,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -333,6 +334,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1793,6 +1800,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1800,6 +1808,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/mwsNetworks.python.md b/docs/mwsNetworks.python.md
index ef5dd4911..e108e3ebc 100644
--- a/docs/mwsNetworks.python.md
+++ b/docs/mwsNetworks.python.md
@@ -245,6 +245,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -335,6 +336,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1899,6 +1906,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1906,6 +1914,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/mwsNetworks.typescript.md b/docs/mwsNetworks.typescript.md
index 8cceaa596..2c43e7190 100644
--- a/docs/mwsNetworks.typescript.md
+++ b/docs/mwsNetworks.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1558,6 +1565,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1565,6 +1573,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/mwsPermissionAssignment.csharp.md b/docs/mwsPermissionAssignment.csharp.md
index d0b66baa6..b9ccea61b 100644
--- a/docs/mwsPermissionAssignment.csharp.md
+++ b/docs/mwsPermissionAssignment.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsPermissionAssignment.go.md b/docs/mwsPermissionAssignment.go.md
index 283f8d7f6..6b5a9aea3 100644
--- a/docs/mwsPermissionAssignment.go.md
+++ b/docs/mwsPermissionAssignment.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
mwspermissionassignment.NewMwsPermissionAssignment(scope Construct, id *string, config MwsPermissionAssignmentConfig) MwsPermissionAssignment
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
mwspermissionassignment.MwsPermissionAssignment_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
mwspermissionassignment.MwsPermissionAssignment_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ mwspermissionassignment.MwsPermissionAssignment_IsTerraformElement(x interface{}
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
mwspermissionassignment.MwsPermissionAssignment_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ mwspermissionassignment.MwsPermissionAssignment_IsTerraformResource(x interface{
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
mwspermissionassignment.MwsPermissionAssignment_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -755,7 +762,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwspermissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwspermissionassignment"
&mwspermissionassignment.MwsPermissionAssignmentConfig {
Connection: interface{},
diff --git a/docs/mwsPermissionAssignment.java.md b/docs/mwsPermissionAssignment.java.md
index 0db4fc268..91f468c88 100644
--- a/docs/mwsPermissionAssignment.java.md
+++ b/docs/mwsPermissionAssignment.java.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -223,6 +224,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsPermissionAssignment.python.md b/docs/mwsPermissionAssignment.python.md
index b2aa9453f..3e8c77154 100644
--- a/docs/mwsPermissionAssignment.python.md
+++ b/docs/mwsPermissionAssignment.python.md
@@ -149,6 +149,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -226,6 +227,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsPermissionAssignment.typescript.md b/docs/mwsPermissionAssignment.typescript.md
index bf0bdc1c0..5ab4833c8 100644
--- a/docs/mwsPermissionAssignment.typescript.md
+++ b/docs/mwsPermissionAssignment.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsPrivateAccessSettings.csharp.md b/docs/mwsPrivateAccessSettings.csharp.md
index 27b4e0ff3..8ed689e0a 100644
--- a/docs/mwsPrivateAccessSettings.csharp.md
+++ b/docs/mwsPrivateAccessSettings.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsPrivateAccessSettings.go.md b/docs/mwsPrivateAccessSettings.go.md
index a936c0c01..e8ee8088a 100644
--- a/docs/mwsPrivateAccessSettings.go.md
+++ b/docs/mwsPrivateAccessSettings.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
mwsprivateaccesssettings.NewMwsPrivateAccessSettings(scope Construct, id *string, config MwsPrivateAccessSettingsConfig) MwsPrivateAccessSettings
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -420,7 +427,7 @@ func ResetStatus()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
mwsprivateaccesssettings.MwsPrivateAccessSettings_IsConstruct(x interface{}) *bool
```
@@ -452,7 +459,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
mwsprivateaccesssettings.MwsPrivateAccessSettings_IsTerraformElement(x interface{}) *bool
```
@@ -466,7 +473,7 @@ mwsprivateaccesssettings.MwsPrivateAccessSettings_IsTerraformElement(x interface
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
mwsprivateaccesssettings.MwsPrivateAccessSettings_IsTerraformResource(x interface{}) *bool
```
@@ -480,7 +487,7 @@ mwsprivateaccesssettings.MwsPrivateAccessSettings_IsTerraformResource(x interfac
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
mwsprivateaccesssettings.MwsPrivateAccessSettings_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -907,7 +914,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsprivateaccesssettings"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsprivateaccesssettings"
&mwsprivateaccesssettings.MwsPrivateAccessSettingsConfig {
Connection: interface{},
diff --git a/docs/mwsPrivateAccessSettings.java.md b/docs/mwsPrivateAccessSettings.java.md
index c26a173af..14972ae6a 100644
--- a/docs/mwsPrivateAccessSettings.java.md
+++ b/docs/mwsPrivateAccessSettings.java.md
@@ -202,6 +202,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -280,6 +281,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsPrivateAccessSettings.python.md b/docs/mwsPrivateAccessSettings.python.md
index 8b4ab8bc3..e6e6f2f58 100644
--- a/docs/mwsPrivateAccessSettings.python.md
+++ b/docs/mwsPrivateAccessSettings.python.md
@@ -199,6 +199,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -282,6 +283,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsPrivateAccessSettings.typescript.md b/docs/mwsPrivateAccessSettings.typescript.md
index be8b19c98..4aa93c6b4 100644
--- a/docs/mwsPrivateAccessSettings.typescript.md
+++ b/docs/mwsPrivateAccessSettings.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsStorageConfigurations.csharp.md b/docs/mwsStorageConfigurations.csharp.md
index 293edb67e..507c3d82a 100644
--- a/docs/mwsStorageConfigurations.csharp.md
+++ b/docs/mwsStorageConfigurations.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsStorageConfigurations.go.md b/docs/mwsStorageConfigurations.go.md
index 9f78a8790..3445fbe89 100644
--- a/docs/mwsStorageConfigurations.go.md
+++ b/docs/mwsStorageConfigurations.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
mwsstorageconfigurations.NewMwsStorageConfigurations(scope Construct, id *string, config MwsStorageConfigurationsConfig) MwsStorageConfigurations
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
mwsstorageconfigurations.MwsStorageConfigurations_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
mwsstorageconfigurations.MwsStorageConfigurations_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ mwsstorageconfigurations.MwsStorageConfigurations_IsTerraformElement(x interface
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
mwsstorageconfigurations.MwsStorageConfigurations_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ mwsstorageconfigurations.MwsStorageConfigurations_IsTerraformResource(x interfac
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
mwsstorageconfigurations.MwsStorageConfigurations_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -777,7 +784,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsstorageconfigurations"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsstorageconfigurations"
&mwsstorageconfigurations.MwsStorageConfigurationsConfig {
Connection: interface{},
diff --git a/docs/mwsStorageConfigurations.java.md b/docs/mwsStorageConfigurations.java.md
index a2bfb4b44..cb4f307d4 100644
--- a/docs/mwsStorageConfigurations.java.md
+++ b/docs/mwsStorageConfigurations.java.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -223,6 +224,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsStorageConfigurations.python.md b/docs/mwsStorageConfigurations.python.md
index 28cac2668..6d0f76ad5 100644
--- a/docs/mwsStorageConfigurations.python.md
+++ b/docs/mwsStorageConfigurations.python.md
@@ -149,6 +149,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -226,6 +227,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsStorageConfigurations.typescript.md b/docs/mwsStorageConfigurations.typescript.md
index 731466e85..3953167ae 100644
--- a/docs/mwsStorageConfigurations.typescript.md
+++ b/docs/mwsStorageConfigurations.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsVpcEndpoint.csharp.md b/docs/mwsVpcEndpoint.csharp.md
index bfa1731d1..17dd5a99d 100644
--- a/docs/mwsVpcEndpoint.csharp.md
+++ b/docs/mwsVpcEndpoint.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsVpcEndpoint.go.md b/docs/mwsVpcEndpoint.go.md
index 2af49b98e..2c9f90ff0 100644
--- a/docs/mwsVpcEndpoint.go.md
+++ b/docs/mwsVpcEndpoint.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.NewMwsVpcEndpoint(scope Construct, id *string, config MwsVpcEndpointConfig) MwsVpcEndpoint
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -454,7 +461,7 @@ func ResetVpcEndpointId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.MwsVpcEndpoint_IsConstruct(x interface{}) *bool
```
@@ -486,7 +493,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.MwsVpcEndpoint_IsTerraformElement(x interface{}) *bool
```
@@ -500,7 +507,7 @@ mwsvpcendpoint.MwsVpcEndpoint_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.MwsVpcEndpoint_IsTerraformResource(x interface{}) *bool
```
@@ -514,7 +521,7 @@ mwsvpcendpoint.MwsVpcEndpoint_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.MwsVpcEndpoint_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -985,7 +992,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
&mwsvpcendpoint.MwsVpcEndpointConfig {
Connection: interface{},
@@ -1000,7 +1007,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpo
AwsAccountId: *string,
AwsEndpointServiceId: *string,
AwsVpcEndpointId: *string,
- GcpVpcEndpointInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsVpcEndpoint.MwsVpcEndpointGcpVpcEndpointInfo,
+ GcpVpcEndpointInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsVpcEndpoint.MwsVpcEndpointGcpVpcEndpointInfo,
Id: *string,
Region: *string,
State: *string,
@@ -1246,7 +1253,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
&mwsvpcendpoint.MwsVpcEndpointGcpVpcEndpointInfo {
EndpointRegion: *string,
@@ -1336,7 +1343,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsvpcendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsvpcendpoint"
mwsvpcendpoint.NewMwsVpcEndpointGcpVpcEndpointInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsVpcEndpointGcpVpcEndpointInfoOutputReference
```
diff --git a/docs/mwsVpcEndpoint.java.md b/docs/mwsVpcEndpoint.java.md
index 418686c3e..23cda1e7f 100644
--- a/docs/mwsVpcEndpoint.java.md
+++ b/docs/mwsVpcEndpoint.java.md
@@ -223,6 +223,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -305,6 +306,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsVpcEndpoint.python.md b/docs/mwsVpcEndpoint.python.md
index e6b41582d..ed9eb3b35 100644
--- a/docs/mwsVpcEndpoint.python.md
+++ b/docs/mwsVpcEndpoint.python.md
@@ -221,6 +221,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -308,6 +309,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsVpcEndpoint.typescript.md b/docs/mwsVpcEndpoint.typescript.md
index eba7daacc..2964d637c 100644
--- a/docs/mwsVpcEndpoint.typescript.md
+++ b/docs/mwsVpcEndpoint.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/mwsWorkspaces.csharp.md b/docs/mwsWorkspaces.csharp.md
index 178bd0e5b..21ae4bc36 100644
--- a/docs/mwsWorkspaces.csharp.md
+++ b/docs/mwsWorkspaces.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -156,6 +157,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/mwsWorkspaces.go.md b/docs/mwsWorkspaces.go.md
index 537f045e3..599c154b5 100644
--- a/docs/mwsWorkspaces.go.md
+++ b/docs/mwsWorkspaces.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspaces(scope Construct, id *string, config MwsWorkspacesConfig) MwsWorkspaces
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -156,6 +157,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -624,7 +631,7 @@ func ResetWorkspaceUrl()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.MwsWorkspaces_IsConstruct(x interface{}) *bool
```
@@ -656,7 +663,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.MwsWorkspaces_IsTerraformElement(x interface{}) *bool
```
@@ -670,7 +677,7 @@ mwsworkspaces.MwsWorkspaces_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.MwsWorkspaces_IsTerraformResource(x interface{}) *bool
```
@@ -684,7 +691,7 @@ mwsworkspaces.MwsWorkspaces_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.MwsWorkspaces_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1507,10 +1514,10 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesCloudResourceContainer {
- Gcp: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesCloudResourceContainerGcp,
+ Gcp: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesCloudResourceContainerGcp,
}
```
@@ -1541,7 +1548,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesCloudResourceContainerGcp {
ProjectId: *string,
@@ -1573,7 +1580,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesConfig {
Connection: interface{},
@@ -1587,14 +1594,14 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspac
WorkspaceName: *string,
AwsRegion: *string,
Cloud: *string,
- CloudResourceContainer: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesCloudResourceContainer,
+ CloudResourceContainer: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesCloudResourceContainer,
CreationTime: *f64,
CredentialsId: *string,
CustomerManagedKeyId: *string,
DeploymentName: *string,
- ExternalCustomerInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesExternalCustomerInfo,
- GcpManagedNetworkConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesGcpManagedNetworkConfig,
- GkeConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesGkeConfig,
+ ExternalCustomerInfo: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesExternalCustomerInfo,
+ GcpManagedNetworkConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesGcpManagedNetworkConfig,
+ GkeConfig: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesGkeConfig,
Id: *string,
IsNoPublicIpEnabled: interface{},
Location: *string,
@@ -1604,8 +1611,8 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspac
PrivateAccessSettingsId: *string,
StorageConfigurationId: *string,
StorageCustomerManagedKeyId: *string,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesTimeouts,
- Token: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.mwsWorkspaces.MwsWorkspacesToken,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesTimeouts,
+ Token: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.mwsWorkspaces.MwsWorkspacesToken,
WorkspaceId: *f64,
WorkspaceStatus: *string,
WorkspaceStatusMessage: *string,
@@ -2068,7 +2075,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesExternalCustomerInfo {
AuthoritativeUserEmail: *string,
@@ -2128,7 +2135,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesGcpManagedNetworkConfig {
GkeClusterPodIpRange: *string,
@@ -2188,7 +2195,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesGkeConfig {
ConnectivityType: *string,
@@ -2234,7 +2241,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesTimeouts {
Create: *string,
@@ -2294,7 +2301,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
&mwsworkspaces.MwsWorkspacesToken {
Comment: *string,
@@ -2370,7 +2377,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesCloudResourceContainerGcpOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesCloudResourceContainerGcpOutputReference
```
@@ -2641,7 +2648,7 @@ func InternalValue() MwsWorkspacesCloudResourceContainerGcp
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesCloudResourceContainerOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesCloudResourceContainerOutputReference
```
@@ -2925,7 +2932,7 @@ func InternalValue() MwsWorkspacesCloudResourceContainer
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesExternalCustomerInfoOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesExternalCustomerInfoOutputReference
```
@@ -3240,7 +3247,7 @@ func InternalValue() MwsWorkspacesExternalCustomerInfo
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesGcpManagedNetworkConfigOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesGcpManagedNetworkConfigOutputReference
```
@@ -3555,7 +3562,7 @@ func InternalValue() MwsWorkspacesGcpManagedNetworkConfig
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesGkeConfigOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesGkeConfigOutputReference
```
@@ -3848,7 +3855,7 @@ func InternalValue() MwsWorkspacesGkeConfig
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesTimeoutsOutputReference
```
@@ -4184,7 +4191,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/mwsworkspaces"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/mwsworkspaces"
mwsworkspaces.NewMwsWorkspacesTokenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) MwsWorkspacesTokenOutputReference
```
diff --git a/docs/mwsWorkspaces.java.md b/docs/mwsWorkspaces.java.md
index fddc22608..26f704487 100644
--- a/docs/mwsWorkspaces.java.md
+++ b/docs/mwsWorkspaces.java.md
@@ -394,6 +394,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -496,6 +497,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/mwsWorkspaces.python.md b/docs/mwsWorkspaces.python.md
index 7a48d19c2..60ff55882 100644
--- a/docs/mwsWorkspaces.python.md
+++ b/docs/mwsWorkspaces.python.md
@@ -391,6 +391,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -498,6 +499,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/mwsWorkspaces.typescript.md b/docs/mwsWorkspaces.typescript.md
index 9fd7ccbb0..a418c6511 100644
--- a/docs/mwsWorkspaces.typescript.md
+++ b/docs/mwsWorkspaces.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -156,6 +157,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/notebook.csharp.md b/docs/notebook.csharp.md
index 3788b06b7..8f2f8081c 100644
--- a/docs/notebook.csharp.md
+++ b/docs/notebook.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/notebook.go.md b/docs/notebook.go.md
index a3eeeab81..248fb2d28 100644
--- a/docs/notebook.go.md
+++ b/docs/notebook.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
notebook.NewNotebook(scope Construct, id *string, config NotebookConfig) Notebook
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -427,7 +434,7 @@ func ResetSource()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
notebook.Notebook_IsConstruct(x interface{}) *bool
```
@@ -459,7 +466,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
notebook.Notebook_IsTerraformElement(x interface{}) *bool
```
@@ -473,7 +480,7 @@ notebook.Notebook_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
notebook.Notebook_IsTerraformResource(x interface{}) *bool
```
@@ -487,7 +494,7 @@ notebook.Notebook_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
notebook.Notebook_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -925,7 +932,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/notebook"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/notebook"
¬ebook.NotebookConfig {
Connection: interface{},
diff --git a/docs/notebook.java.md b/docs/notebook.java.md
index 74391a483..948cb67e5 100644
--- a/docs/notebook.java.md
+++ b/docs/notebook.java.md
@@ -201,6 +201,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -280,6 +281,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/notebook.python.md b/docs/notebook.python.md
index fbc049db0..a92bab865 100644
--- a/docs/notebook.python.md
+++ b/docs/notebook.python.md
@@ -199,6 +199,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -283,6 +284,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/notebook.typescript.md b/docs/notebook.typescript.md
index 485432f92..695e0d9b6 100644
--- a/docs/notebook.typescript.md
+++ b/docs/notebook.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/oboToken.csharp.md b/docs/oboToken.csharp.md
index 1ff093c71..05ac4491b 100644
--- a/docs/oboToken.csharp.md
+++ b/docs/oboToken.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/oboToken.go.md b/docs/oboToken.go.md
index 7d2ce410d..ba6c0ff05 100644
--- a/docs/oboToken.go.md
+++ b/docs/oboToken.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
obotoken.NewOboToken(scope Construct, id *string, config OboTokenConfig) OboToken
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetLifetimeSeconds()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
obotoken.OboToken_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
obotoken.OboToken_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ obotoken.OboToken_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
obotoken.OboToken_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ obotoken.OboToken_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
obotoken.OboToken_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -780,7 +787,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/obotoken"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/obotoken"
&obotoken.OboTokenConfig {
Connection: interface{},
diff --git a/docs/oboToken.java.md b/docs/oboToken.java.md
index 3a2de9aaf..ff35e55d0 100644
--- a/docs/oboToken.java.md
+++ b/docs/oboToken.java.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -225,6 +226,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/oboToken.python.md b/docs/oboToken.python.md
index 025e95107..6fa3b7449 100644
--- a/docs/oboToken.python.md
+++ b/docs/oboToken.python.md
@@ -149,6 +149,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -228,6 +229,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/oboToken.typescript.md b/docs/oboToken.typescript.md
index 4fd520d3b..40802553a 100644
--- a/docs/oboToken.typescript.md
+++ b/docs/oboToken.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/permissionAssignment.csharp.md b/docs/permissionAssignment.csharp.md
index 6d5049e7c..4c26dd8e7 100644
--- a/docs/permissionAssignment.csharp.md
+++ b/docs/permissionAssignment.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/permissionAssignment.go.md b/docs/permissionAssignment.go.md
index b94d10a0a..76bc435b3 100644
--- a/docs/permissionAssignment.go.md
+++ b/docs/permissionAssignment.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
permissionassignment.NewPermissionAssignment(scope Construct, id *string, config PermissionAssignmentConfig) PermissionAssignment
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
permissionassignment.PermissionAssignment_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
permissionassignment.PermissionAssignment_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ permissionassignment.PermissionAssignment_IsTerraformElement(x interface{}) *boo
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
permissionassignment.PermissionAssignment_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ permissionassignment.PermissionAssignment_IsTerraformResource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
permissionassignment.PermissionAssignment_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissionassignment"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissionassignment"
&permissionassignment.PermissionAssignmentConfig {
Connection: interface{},
diff --git a/docs/permissionAssignment.java.md b/docs/permissionAssignment.java.md
index 111d14c4f..8855fc678 100644
--- a/docs/permissionAssignment.java.md
+++ b/docs/permissionAssignment.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/permissionAssignment.python.md b/docs/permissionAssignment.python.md
index f86ea1768..ed4f51517 100644
--- a/docs/permissionAssignment.python.md
+++ b/docs/permissionAssignment.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/permissionAssignment.typescript.md b/docs/permissionAssignment.typescript.md
index b2c6e1fec..d28ed8fdb 100644
--- a/docs/permissionAssignment.typescript.md
+++ b/docs/permissionAssignment.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/permissions.csharp.md b/docs/permissions.csharp.md
index deb38510d..4047ecb5a 100644
--- a/docs/permissions.csharp.md
+++ b/docs/permissions.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -149,6 +150,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1922,6 +1929,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1929,6 +1937,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/permissions.go.md b/docs/permissions.go.md
index 7dcd70de8..bdd1b6ca9 100644
--- a/docs/permissions.go.md
+++ b/docs/permissions.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.NewPermissions(scope Construct, id *string, config PermissionsConfig) Permissions
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -149,6 +150,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -545,7 +552,7 @@ func ResetWorkspaceFilePath()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.Permissions_IsConstruct(x interface{}) *bool
```
@@ -577,7 +584,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.Permissions_IsTerraformElement(x interface{}) *bool
```
@@ -591,7 +598,7 @@ permissions.Permissions_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.Permissions_IsTerraformResource(x interface{}) *bool
```
@@ -605,7 +612,7 @@ permissions.Permissions_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.Permissions_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1362,7 +1369,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
&permissions.PermissionsAccessControl {
PermissionLevel: *string,
@@ -1436,7 +1443,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
&permissions.PermissionsConfig {
Connection: interface{},
@@ -1881,7 +1888,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.NewPermissionsAccessControlList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) PermissionsAccessControlList
```
@@ -1922,6 +1929,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1929,6 +1937,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2024,7 +2048,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/permissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/permissions"
permissions.NewPermissionsAccessControlOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) PermissionsAccessControlOutputReference
```
diff --git a/docs/permissions.java.md b/docs/permissions.java.md
index a700b8827..5e62e9caa 100644
--- a/docs/permissions.java.md
+++ b/docs/permissions.java.md
@@ -354,6 +354,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -449,6 +450,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -2229,6 +2236,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2236,6 +2244,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/permissions.python.md b/docs/permissions.python.md
index bc0fc4870..4330707be 100644
--- a/docs/permissions.python.md
+++ b/docs/permissions.python.md
@@ -351,6 +351,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -451,6 +452,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -2273,6 +2280,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2280,6 +2288,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/permissions.typescript.md b/docs/permissions.typescript.md
index efab6d1fd..9bdee2e1e 100644
--- a/docs/permissions.typescript.md
+++ b/docs/permissions.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -149,6 +150,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1885,6 +1892,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1892,6 +1900,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/pipeline.csharp.md b/docs/pipeline.csharp.md
index f2464ddce..16c3a8e02 100644
--- a/docs/pipeline.csharp.md
+++ b/docs/pipeline.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -148,6 +149,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -6938,6 +6945,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6945,6 +6953,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -8643,6 +8667,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -8650,6 +8675,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -10260,6 +10301,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -10267,6 +10309,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -11454,6 +11512,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11461,6 +11520,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/pipeline.go.md b/docs/pipeline.go.md
index 9e4fd2696..59e2e06a8 100644
--- a/docs/pipeline.go.md
+++ b/docs/pipeline.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipeline(scope Construct, id *string, config PipelineConfig) Pipeline
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -148,6 +149,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -562,7 +569,7 @@ func ResetTimeouts()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.Pipeline_IsConstruct(x interface{}) *bool
```
@@ -594,7 +601,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.Pipeline_IsTerraformElement(x interface{}) *bool
```
@@ -608,7 +615,7 @@ pipeline.Pipeline_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.Pipeline_IsTerraformResource(x interface{}) *bool
```
@@ -622,7 +629,7 @@ pipeline.Pipeline_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.Pipeline_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1258,19 +1265,19 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineCluster {
ApplyPolicyDefaultValues: interface{},
- Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterAutoscale,
- AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterAwsAttributes,
- AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterAzureAttributes,
- ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterClusterLogConf,
+ Autoscale: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterAutoscale,
+ AwsAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterAwsAttributes,
+ AzureAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterAzureAttributes,
+ ClusterLogConf: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterClusterLogConf,
CustomTags: *map[string]*string,
DriverInstancePoolId: *string,
DriverNodeTypeId: *string,
EnableLocalDiskEncryption: interface{},
- GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterGcpAttributes,
+ GcpAttributes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterGcpAttributes,
InitScripts: interface{},
InstancePoolId: *string,
Label: *string,
@@ -1554,7 +1561,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterAutoscale {
MaxWorkers: *f64,
@@ -1614,7 +1621,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterAwsAttributes {
Availability: *string,
@@ -1744,7 +1751,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterAzureAttributes {
Availability: *string,
@@ -1804,11 +1811,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterClusterLogConf {
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterClusterLogConfDbfs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterClusterLogConfS3,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterClusterLogConfDbfs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterClusterLogConfS3,
}
```
@@ -1854,7 +1861,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterClusterLogConfDbfs {
Destination: *string,
@@ -1886,7 +1893,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterClusterLogConfS3 {
Destination: *string,
@@ -2002,7 +2009,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterGcpAttributes {
Availability: *string,
@@ -2076,16 +2083,16 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScripts {
- Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsAbfss,
- Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsDbfs,
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsFile,
- Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsGcs,
- S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsS3,
- Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsVolumes,
- Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineClusterInitScriptsWorkspace,
+ Abfss: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsAbfss,
+ Dbfs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsDbfs,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsFile,
+ Gcs: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsGcs,
+ S3: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsS3,
+ Volumes: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsVolumes,
+ Workspace: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineClusterInitScriptsWorkspace,
}
```
@@ -2206,7 +2213,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsAbfss {
Destination: *string,
@@ -2238,7 +2245,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsDbfs {
Destination: *string,
@@ -2270,7 +2277,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsFile {
Destination: *string,
@@ -2302,7 +2309,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsGcs {
Destination: *string,
@@ -2334,7 +2341,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsS3 {
Destination: *string,
@@ -2450,7 +2457,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsVolumes {
Destination: *string,
@@ -2482,7 +2489,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineClusterInitScriptsWorkspace {
Destination: *string,
@@ -2514,7 +2521,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineConfig {
Connection: interface{},
@@ -2532,7 +2539,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
Continuous: interface{},
Development: interface{},
Edition: *string,
- Filters: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineFilters,
+ Filters: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineFilters,
Id: *string,
Library: interface{},
Name: *string,
@@ -2541,7 +2548,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
Serverless: interface{},
Storage: *string,
Target: *string,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineTimeouts,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineTimeouts,
}
```
@@ -2881,7 +2888,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineFilters {
Exclude: *[]*string,
@@ -2927,13 +2934,13 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineLibrary {
- File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineLibraryFile,
+ File: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineLibraryFile,
Jar: *string,
- Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineLibraryMaven,
- Notebook: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.pipeline.PipelineLibraryNotebook,
+ Maven: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineLibraryMaven,
+ Notebook: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.pipeline.PipelineLibraryNotebook,
Whl: *string,
}
```
@@ -3021,7 +3028,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineLibraryFile {
Path: *string,
@@ -3053,7 +3060,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineLibraryMaven {
Coordinates: *string,
@@ -3113,7 +3120,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineLibraryNotebook {
Path: *string,
@@ -3145,7 +3152,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineNotification {
Alerts: *[]*string,
@@ -3191,7 +3198,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
&pipeline.PipelineTimeouts {
Default: *string,
@@ -3225,7 +3232,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterAutoscaleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterAutoscaleOutputReference
```
@@ -3561,7 +3568,7 @@ func InternalValue() PipelineClusterAutoscale
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterAwsAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterAwsAttributesOutputReference
```
@@ -4042,7 +4049,7 @@ func InternalValue() PipelineClusterAwsAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterAzureAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterAzureAttributesOutputReference
```
@@ -4378,7 +4385,7 @@ func InternalValue() PipelineClusterAzureAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterClusterLogConfDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterClusterLogConfDbfsOutputReference
```
@@ -4649,7 +4656,7 @@ func InternalValue() PipelineClusterClusterLogConfDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterClusterLogConfOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterClusterLogConfOutputReference
```
@@ -4982,7 +4989,7 @@ func InternalValue() PipelineClusterClusterLogConf
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterClusterLogConfS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterClusterLogConfS3OutputReference
```
@@ -5427,7 +5434,7 @@ func InternalValue() PipelineClusterClusterLogConfS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterGcpAttributesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterGcpAttributesOutputReference
```
@@ -5792,7 +5799,7 @@ func InternalValue() PipelineClusterGcpAttributes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsAbfssOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsAbfssOutputReference
```
@@ -6070,7 +6077,7 @@ func InternalValue() PipelineClusterInitScriptsAbfss
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsDbfsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsDbfsOutputReference
```
@@ -6341,7 +6348,7 @@ func InternalValue() PipelineClusterInitScriptsDbfs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsFileOutputReference
```
@@ -6619,7 +6626,7 @@ func InternalValue() PipelineClusterInitScriptsFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsGcsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsGcsOutputReference
```
@@ -6897,7 +6904,7 @@ func InternalValue() PipelineClusterInitScriptsGcs
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) PipelineClusterInitScriptsList
```
@@ -6938,6 +6945,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -6945,6 +6953,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -7040,7 +7064,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) PipelineClusterInitScriptsOutputReference
```
@@ -7601,7 +7625,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsS3OutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsS3OutputReference
```
@@ -8046,7 +8070,7 @@ func InternalValue() PipelineClusterInitScriptsS3
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsVolumesOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsVolumesOutputReference
```
@@ -8324,7 +8348,7 @@ func InternalValue() PipelineClusterInitScriptsVolumes
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterInitScriptsWorkspaceOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineClusterInitScriptsWorkspaceOutputReference
```
@@ -8602,7 +8626,7 @@ func InternalValue() PipelineClusterInitScriptsWorkspace
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) PipelineClusterList
```
@@ -8643,6 +8667,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -8650,6 +8675,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -8745,7 +8786,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineClusterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) PipelineClusterOutputReference
```
@@ -9641,7 +9682,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineFiltersOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineFiltersOutputReference
```
@@ -9948,7 +9989,7 @@ func InternalValue() PipelineFilters
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineLibraryFileOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineLibraryFileOutputReference
```
@@ -10219,7 +10260,7 @@ func InternalValue() PipelineLibraryFile
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineLibraryList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) PipelineLibraryList
```
@@ -10260,6 +10301,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -10267,6 +10309,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -10362,7 +10420,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineLibraryMavenOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineLibraryMavenOutputReference
```
@@ -10691,7 +10749,7 @@ func InternalValue() PipelineLibraryMaven
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineLibraryNotebookOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineLibraryNotebookOutputReference
```
@@ -10962,7 +11020,7 @@ func InternalValue() PipelineLibraryNotebook
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineLibraryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) PipelineLibraryOutputReference
```
@@ -11413,7 +11471,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineNotificationList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) PipelineNotificationList
```
@@ -11454,6 +11512,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -11461,6 +11520,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -11556,7 +11631,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineNotificationOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) PipelineNotificationOutputReference
```
@@ -11867,7 +11942,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/pipeline"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/pipeline"
pipeline.NewPipelineTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) PipelineTimeoutsOutputReference
```
diff --git a/docs/pipeline.java.md b/docs/pipeline.java.md
index 8b50d9e4c..8698001db 100644
--- a/docs/pipeline.java.md
+++ b/docs/pipeline.java.md
@@ -309,6 +309,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -403,6 +404,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -7212,6 +7219,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -7219,6 +7227,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -8917,6 +8941,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -8924,6 +8949,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -10534,6 +10575,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -10541,6 +10583,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -11728,6 +11786,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11735,6 +11794,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/pipeline.python.md b/docs/pipeline.python.md
index 79fed44be..3c846cf57 100644
--- a/docs/pipeline.python.md
+++ b/docs/pipeline.python.md
@@ -299,6 +299,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -398,6 +399,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -7595,6 +7602,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -7602,6 +7610,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -9492,6 +9518,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -9499,6 +9526,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -11355,6 +11400,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -11362,6 +11408,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -12664,6 +12728,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -12671,6 +12736,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/pipeline.typescript.md b/docs/pipeline.typescript.md
index c425a816d..e7c8e0105 100644
--- a/docs/pipeline.typescript.md
+++ b/docs/pipeline.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -148,6 +149,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -6807,6 +6814,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6814,6 +6822,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -8512,6 +8536,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -8519,6 +8544,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -10129,6 +10170,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -10136,6 +10178,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -11323,6 +11381,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -11330,6 +11389,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/provider.csharp.md b/docs/provider.csharp.md
index 32265904d..e703f4728 100644
--- a/docs/provider.csharp.md
+++ b/docs/provider.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| ResetAccountId
| *No description.* |
@@ -139,6 +140,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/provider.go.md b/docs/provider.go.md
index 1e188f5b5..e2c00f888 100644
--- a/docs/provider.go.md
+++ b/docs/provider.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
provider.NewDatabricksProvider(scope Construct, id *string, config DatabricksProviderConfig) DatabricksProvider
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| ResetAccountId
| *No description.* |
@@ -139,6 +140,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -347,7 +354,7 @@ func ResetWarehouseId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
provider.DatabricksProvider_IsConstruct(x interface{}) *bool
```
@@ -379,7 +386,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
provider.DatabricksProvider_IsTerraformElement(x interface{}) *bool
```
@@ -393,7 +400,7 @@ provider.DatabricksProvider_IsTerraformElement(x interface{}) *bool
##### `IsTerraformProvider`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
provider.DatabricksProvider_IsTerraformProvider(x interface{}) *bool
```
@@ -407,7 +414,7 @@ provider.DatabricksProvider_IsTerraformProvider(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
provider.DatabricksProvider_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1230,7 +1237,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/provider"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/provider"
&provider.DatabricksProviderConfig {
AccountId: *string,
diff --git a/docs/provider.java.md b/docs/provider.java.md
index e4c12a712..e9428a6f5 100644
--- a/docs/provider.java.md
+++ b/docs/provider.java.md
@@ -353,6 +353,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| resetAccountId
| *No description.* |
@@ -438,6 +439,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/provider.python.md b/docs/provider.python.md
index 8c3013c11..af8fc4a02 100644
--- a/docs/provider.python.md
+++ b/docs/provider.python.md
@@ -352,6 +352,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| reset_account_id
| *No description.* |
@@ -442,6 +443,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/provider.typescript.md b/docs/provider.typescript.md
index e9d80d5f5..fda8bef24 100644
--- a/docs/provider.typescript.md
+++ b/docs/provider.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| resetAccountId
| *No description.* |
@@ -139,6 +140,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/providerResource.csharp.md b/docs/providerResource.csharp.md
index 13130cdfa..ff507b23f 100644
--- a/docs/providerResource.csharp.md
+++ b/docs/providerResource.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/providerResource.go.md b/docs/providerResource.go.md
index 186428e3e..d3646c760 100644
--- a/docs/providerResource.go.md
+++ b/docs/providerResource.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
providerresource.NewProviderResource(scope Construct, id *string, config ProviderResourceConfig) ProviderResource
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -385,7 +392,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
providerresource.ProviderResource_IsConstruct(x interface{}) *bool
```
@@ -417,7 +424,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
providerresource.ProviderResource_IsTerraformElement(x interface{}) *bool
```
@@ -431,7 +438,7 @@ providerresource.ProviderResource_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
providerresource.ProviderResource_IsTerraformResource(x interface{}) *bool
```
@@ -445,7 +452,7 @@ providerresource.ProviderResource_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
providerresource.ProviderResource_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -784,7 +791,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/providerresource"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/providerresource"
&providerresource.ProviderResourceConfig {
Connection: interface{},
diff --git a/docs/providerResource.java.md b/docs/providerResource.java.md
index 3b05e7425..b73f9fa6d 100644
--- a/docs/providerResource.java.md
+++ b/docs/providerResource.java.md
@@ -161,6 +161,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -234,6 +235,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/providerResource.python.md b/docs/providerResource.python.md
index 624a9ca3a..0c5361d86 100644
--- a/docs/providerResource.python.md
+++ b/docs/providerResource.python.md
@@ -159,6 +159,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -237,6 +238,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/providerResource.typescript.md b/docs/providerResource.typescript.md
index f94bb5954..7f8dafb9d 100644
--- a/docs/providerResource.typescript.md
+++ b/docs/providerResource.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/recipient.csharp.md b/docs/recipient.csharp.md
index 155eca440..a0d379b45 100644
--- a/docs/recipient.csharp.md
+++ b/docs/recipient.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1633,6 +1640,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1640,6 +1648,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/recipient.go.md b/docs/recipient.go.md
index 147a98f8c..2b5962b2b 100644
--- a/docs/recipient.go.md
+++ b/docs/recipient.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.NewRecipient(scope Construct, id *string, config RecipientConfig) Recipient
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -446,7 +453,7 @@ func ResetTokens()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.Recipient_IsConstruct(x interface{}) *bool
```
@@ -478,7 +485,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.Recipient_IsTerraformElement(x interface{}) *bool
```
@@ -492,7 +499,7 @@ recipient.Recipient_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.Recipient_IsTerraformResource(x interface{}) *bool
```
@@ -506,7 +513,7 @@ recipient.Recipient_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.Recipient_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -933,7 +940,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
&recipient.RecipientConfig {
Connection: interface{},
@@ -948,7 +955,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
Comment: *string,
DataRecipientGlobalMetastoreId: *string,
Id: *string,
- IpAccessList: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.recipient.RecipientIpAccessListStruct,
+ IpAccessList: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.recipient.RecipientIpAccessListStruct,
Owner: *string,
SharingCode: *string,
Tokens: interface{},
@@ -1168,7 +1175,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
&recipient.RecipientIpAccessListStruct {
AllowedIpAddresses: *[]*string,
@@ -1200,7 +1207,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
&recipient.RecipientTokens {
ActivationUrl: *string,
@@ -1321,7 +1328,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.NewRecipientIpAccessListStructOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) RecipientIpAccessListStructOutputReference
```
@@ -1592,7 +1599,7 @@ func InternalValue() RecipientIpAccessListStruct
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.NewRecipientTokensList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) RecipientTokensList
```
@@ -1633,6 +1640,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1640,6 +1648,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1735,7 +1759,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/recipient"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/recipient"
recipient.NewRecipientTokensOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) RecipientTokensOutputReference
```
diff --git a/docs/recipient.java.md b/docs/recipient.java.md
index 4eb218e8c..0e331892a 100644
--- a/docs/recipient.java.md
+++ b/docs/recipient.java.md
@@ -206,6 +206,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -286,6 +287,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1792,6 +1799,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1799,6 +1807,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/recipient.python.md b/docs/recipient.python.md
index d7b8e4903..90ad3e4d2 100644
--- a/docs/recipient.python.md
+++ b/docs/recipient.python.md
@@ -203,6 +203,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -288,6 +289,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1865,6 +1872,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1872,6 +1880,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/recipient.typescript.md b/docs/recipient.typescript.md
index bf275c0c5..83508045e 100644
--- a/docs/recipient.typescript.md
+++ b/docs/recipient.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -134,6 +135,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1606,6 +1613,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1613,6 +1621,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/registeredModel.csharp.md b/docs/registeredModel.csharp.md
index 0247f52bd..ea6a5076a 100644
--- a/docs/registeredModel.csharp.md
+++ b/docs/registeredModel.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/registeredModel.go.md b/docs/registeredModel.go.md
index 2509247b9..03dcc0652 100644
--- a/docs/registeredModel.go.md
+++ b/docs/registeredModel.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
registeredmodel.NewRegisteredModel(scope Construct, id *string, config RegisteredModelConfig) RegisteredModel
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetStorageLocation()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
registeredmodel.RegisteredModel_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
registeredmodel.RegisteredModel_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ registeredmodel.RegisteredModel_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
registeredmodel.RegisteredModel_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ registeredmodel.RegisteredModel_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
registeredmodel.RegisteredModel_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -813,7 +820,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/registeredmodel"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/registeredmodel"
&registeredmodel.RegisteredModelConfig {
Connection: interface{},
diff --git a/docs/registeredModel.java.md b/docs/registeredModel.java.md
index 5f4a3ec24..564bc2294 100644
--- a/docs/registeredModel.java.md
+++ b/docs/registeredModel.java.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -245,6 +246,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/registeredModel.python.md b/docs/registeredModel.python.md
index a9f9a5a2b..e7963d006 100644
--- a/docs/registeredModel.python.md
+++ b/docs/registeredModel.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -248,6 +249,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/registeredModel.typescript.md b/docs/registeredModel.typescript.md
index 97a90053f..65d3db17a 100644
--- a/docs/registeredModel.typescript.md
+++ b/docs/registeredModel.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/repo.csharp.md b/docs/repo.csharp.md
index 827afe76a..352d1c1ec 100644
--- a/docs/repo.csharp.md
+++ b/docs/repo.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/repo.go.md b/docs/repo.go.md
index c4bc7f9e8..204038f74 100644
--- a/docs/repo.go.md
+++ b/docs/repo.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.NewRepo(scope Construct, id *string, config RepoConfig) Repo
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -433,7 +440,7 @@ func ResetTag()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.Repo_IsConstruct(x interface{}) *bool
```
@@ -465,7 +472,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.Repo_IsTerraformElement(x interface{}) *bool
```
@@ -479,7 +486,7 @@ repo.Repo_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.Repo_IsTerraformResource(x interface{}) *bool
```
@@ -493,7 +500,7 @@ repo.Repo_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.Repo_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -898,7 +905,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
&repo.RepoConfig {
Connection: interface{},
@@ -914,7 +921,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
GitProvider: *string,
Id: *string,
Path: *string,
- SparseCheckout: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.repo.RepoSparseCheckout,
+ SparseCheckout: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.repo.RepoSparseCheckout,
Tag: *string,
}
```
@@ -1117,7 +1124,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
&repo.RepoSparseCheckout {
Patterns: *[]*string,
@@ -1151,7 +1158,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/repo"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/repo"
repo.NewRepoSparseCheckoutOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) RepoSparseCheckoutOutputReference
```
diff --git a/docs/repo.java.md b/docs/repo.java.md
index f4f1740cc..9be2198bc 100644
--- a/docs/repo.java.md
+++ b/docs/repo.java.md
@@ -193,6 +193,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -272,6 +273,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/repo.python.md b/docs/repo.python.md
index 4c0ce112a..5b3fe1bba 100644
--- a/docs/repo.python.md
+++ b/docs/repo.python.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -275,6 +276,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/repo.typescript.md b/docs/repo.typescript.md
index 901e43f50..486cf666e 100644
--- a/docs/repo.typescript.md
+++ b/docs/repo.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/schema.csharp.md b/docs/schema.csharp.md
index 26dc2df77..472f0a344 100644
--- a/docs/schema.csharp.md
+++ b/docs/schema.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/schema.go.md b/docs/schema.go.md
index 5e67851ee..67644fc68 100644
--- a/docs/schema.go.md
+++ b/docs/schema.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
schema.NewSchema(scope Construct, id *string, config SchemaConfig) Schema
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -420,7 +427,7 @@ func ResetStorageRoot()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
schema.Schema_IsConstruct(x interface{}) *bool
```
@@ -452,7 +459,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
schema.Schema_IsTerraformElement(x interface{}) *bool
```
@@ -466,7 +473,7 @@ schema.Schema_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
schema.Schema_IsTerraformResource(x interface{}) *bool
```
@@ -480,7 +487,7 @@ schema.Schema_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
schema.Schema_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -907,7 +914,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/schema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/schema"
&schema.SchemaConfig {
Connection: interface{},
diff --git a/docs/schema.java.md b/docs/schema.java.md
index 2f8de8114..ccfdc4293 100644
--- a/docs/schema.java.md
+++ b/docs/schema.java.md
@@ -202,6 +202,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -280,6 +281,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/schema.python.md b/docs/schema.python.md
index 39a45c1e1..c903f1b24 100644
--- a/docs/schema.python.md
+++ b/docs/schema.python.md
@@ -199,6 +199,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -282,6 +283,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/schema.typescript.md b/docs/schema.typescript.md
index f13703ea2..3f316ffcc 100644
--- a/docs/schema.typescript.md
+++ b/docs/schema.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/secret.csharp.md b/docs/secret.csharp.md
index aace0c490..a015c302e 100644
--- a/docs/secret.csharp.md
+++ b/docs/secret.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/secret.go.md b/docs/secret.go.md
index c6e665b00..75927c50e 100644
--- a/docs/secret.go.md
+++ b/docs/secret.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
secret.NewSecret(scope Construct, id *string, config SecretConfig) Secret
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
secret.Secret_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
secret.Secret_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ secret.Secret_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
secret.Secret_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ secret.Secret_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
secret.Secret_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -777,7 +784,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secret"
&secret.SecretConfig {
Connection: interface{},
diff --git a/docs/secret.java.md b/docs/secret.java.md
index becf266a8..a130a982a 100644
--- a/docs/secret.java.md
+++ b/docs/secret.java.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -223,6 +224,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/secret.python.md b/docs/secret.python.md
index 322908d82..e5024728d 100644
--- a/docs/secret.python.md
+++ b/docs/secret.python.md
@@ -149,6 +149,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -226,6 +227,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/secret.typescript.md b/docs/secret.typescript.md
index 6fb60bc65..3777f7722 100644
--- a/docs/secret.typescript.md
+++ b/docs/secret.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/secretAcl.csharp.md b/docs/secretAcl.csharp.md
index 205f2d70c..938216be1 100644
--- a/docs/secretAcl.csharp.md
+++ b/docs/secretAcl.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/secretAcl.go.md b/docs/secretAcl.go.md
index 0da4fc3cb..87a62ff4c 100644
--- a/docs/secretAcl.go.md
+++ b/docs/secretAcl.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
secretacl.NewSecretAcl(scope Construct, id *string, config SecretAclConfig) SecretAcl
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
secretacl.SecretAcl_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
secretacl.SecretAcl_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ secretacl.SecretAcl_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
secretacl.SecretAcl_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ secretacl.SecretAcl_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
secretacl.SecretAcl_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -755,7 +762,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretacl"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretacl"
&secretacl.SecretAclConfig {
Connection: interface{},
diff --git a/docs/secretAcl.java.md b/docs/secretAcl.java.md
index 3f3620cb1..cec4e349a 100644
--- a/docs/secretAcl.java.md
+++ b/docs/secretAcl.java.md
@@ -151,6 +151,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -223,6 +224,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/secretAcl.python.md b/docs/secretAcl.python.md
index 6b29f6558..90f8d548f 100644
--- a/docs/secretAcl.python.md
+++ b/docs/secretAcl.python.md
@@ -149,6 +149,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -226,6 +227,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/secretAcl.typescript.md b/docs/secretAcl.typescript.md
index 5884f45d9..214ad1e86 100644
--- a/docs/secretAcl.typescript.md
+++ b/docs/secretAcl.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/secretScope.csharp.md b/docs/secretScope.csharp.md
index d37df5776..65a272ed9 100644
--- a/docs/secretScope.csharp.md
+++ b/docs/secretScope.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/secretScope.go.md b/docs/secretScope.go.md
index ad8832537..99b512722 100644
--- a/docs/secretScope.go.md
+++ b/docs/secretScope.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.NewSecretScope(scope Construct, id *string, config SecretScopeConfig) SecretScope
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -412,7 +419,7 @@ func ResetKeyvaultMetadata()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.SecretScope_IsConstruct(x interface{}) *bool
```
@@ -444,7 +451,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.SecretScope_IsTerraformElement(x interface{}) *bool
```
@@ -458,7 +465,7 @@ secretscope.SecretScope_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.SecretScope_IsTerraformResource(x interface{}) *bool
```
@@ -472,7 +479,7 @@ secretscope.SecretScope_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.SecretScope_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -811,7 +818,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
&secretscope.SecretScopeConfig {
Connection: interface{},
@@ -825,7 +832,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope
BackendType: *string,
Id: *string,
InitialManagePrincipal: *string,
- KeyvaultMetadata: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.secretScope.SecretScopeKeyvaultMetadata,
+ KeyvaultMetadata: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.secretScope.SecretScopeKeyvaultMetadata,
}
```
@@ -988,7 +995,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
&secretscope.SecretScopeKeyvaultMetadata {
DnsName: *string,
@@ -1036,7 +1043,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/secretscope"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/secretscope"
secretscope.NewSecretScopeKeyvaultMetadataOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SecretScopeKeyvaultMetadataOutputReference
```
diff --git a/docs/secretScope.java.md b/docs/secretScope.java.md
index 55eddb559..0cbd45010 100644
--- a/docs/secretScope.java.md
+++ b/docs/secretScope.java.md
@@ -163,6 +163,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -239,6 +240,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/secretScope.python.md b/docs/secretScope.python.md
index 93d43fe2b..9efa52760 100644
--- a/docs/secretScope.python.md
+++ b/docs/secretScope.python.md
@@ -161,6 +161,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -242,6 +243,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/secretScope.typescript.md b/docs/secretScope.typescript.md
index 0d8a98d47..d72baa096 100644
--- a/docs/secretScope.typescript.md
+++ b/docs/secretScope.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/servicePrincipal.csharp.md b/docs/servicePrincipal.csharp.md
index f870ac085..ed8bae4ac 100644
--- a/docs/servicePrincipal.csharp.md
+++ b/docs/servicePrincipal.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -141,6 +142,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/servicePrincipal.go.md b/docs/servicePrincipal.go.md
index 66388a85c..71842f29d 100644
--- a/docs/servicePrincipal.go.md
+++ b/docs/servicePrincipal.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
serviceprincipal.NewServicePrincipal(scope Construct, id *string, config ServicePrincipalConfig) ServicePrincipal
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -141,6 +142,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -483,7 +490,7 @@ func ResetWorkspaceAccess()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
serviceprincipal.ServicePrincipal_IsConstruct(x interface{}) *bool
```
@@ -515,7 +522,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
serviceprincipal.ServicePrincipal_IsTerraformElement(x interface{}) *bool
```
@@ -529,7 +536,7 @@ serviceprincipal.ServicePrincipal_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
serviceprincipal.ServicePrincipal_IsTerraformResource(x interface{}) *bool
```
@@ -543,7 +550,7 @@ serviceprincipal.ServicePrincipal_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
serviceprincipal.ServicePrincipal_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1124,7 +1131,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipal"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipal"
&serviceprincipal.ServicePrincipalConfig {
Connection: interface{},
diff --git a/docs/servicePrincipal.java.md b/docs/servicePrincipal.java.md
index 68b335acd..7854a4a8e 100644
--- a/docs/servicePrincipal.java.md
+++ b/docs/servicePrincipal.java.md
@@ -280,6 +280,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -367,6 +368,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/servicePrincipal.python.md b/docs/servicePrincipal.python.md
index 62d56081f..afa3a8ec1 100644
--- a/docs/servicePrincipal.python.md
+++ b/docs/servicePrincipal.python.md
@@ -269,6 +269,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -361,6 +362,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/servicePrincipal.typescript.md b/docs/servicePrincipal.typescript.md
index a73a5dbc4..8c3e256fa 100644
--- a/docs/servicePrincipal.typescript.md
+++ b/docs/servicePrincipal.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -141,6 +142,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/servicePrincipalRole.csharp.md b/docs/servicePrincipalRole.csharp.md
index b7e3dd6d9..935b99b9a 100644
--- a/docs/servicePrincipalRole.csharp.md
+++ b/docs/servicePrincipalRole.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/servicePrincipalRole.go.md b/docs/servicePrincipalRole.go.md
index 4eb827f3d..d89c3a267 100644
--- a/docs/servicePrincipalRole.go.md
+++ b/docs/servicePrincipalRole.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
serviceprincipalrole.NewServicePrincipalRole(scope Construct, id *string, config ServicePrincipalRoleConfig) ServicePrincipalRole
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
serviceprincipalrole.ServicePrincipalRole_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
serviceprincipalrole.ServicePrincipalRole_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ serviceprincipalrole.ServicePrincipalRole_IsTerraformElement(x interface{}) *boo
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
serviceprincipalrole.ServicePrincipalRole_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ serviceprincipalrole.ServicePrincipalRole_IsTerraformResource(x interface{}) *bo
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
serviceprincipalrole.ServicePrincipalRole_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalrole"
&serviceprincipalrole.ServicePrincipalRoleConfig {
Connection: interface{},
diff --git a/docs/servicePrincipalRole.java.md b/docs/servicePrincipalRole.java.md
index b9388779f..c299ccb90 100644
--- a/docs/servicePrincipalRole.java.md
+++ b/docs/servicePrincipalRole.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/servicePrincipalRole.python.md b/docs/servicePrincipalRole.python.md
index a78935696..7128dc78e 100644
--- a/docs/servicePrincipalRole.python.md
+++ b/docs/servicePrincipalRole.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/servicePrincipalRole.typescript.md b/docs/servicePrincipalRole.typescript.md
index 144e2d71b..084798fba 100644
--- a/docs/servicePrincipalRole.typescript.md
+++ b/docs/servicePrincipalRole.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/servicePrincipalSecret.csharp.md b/docs/servicePrincipalSecret.csharp.md
index fe4ec3998..4181eb0e8 100644
--- a/docs/servicePrincipalSecret.csharp.md
+++ b/docs/servicePrincipalSecret.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/servicePrincipalSecret.go.md b/docs/servicePrincipalSecret.go.md
index eb768550b..bd83c77c4 100644
--- a/docs/servicePrincipalSecret.go.md
+++ b/docs/servicePrincipalSecret.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
serviceprincipalsecret.NewServicePrincipalSecret(scope Construct, id *string, config ServicePrincipalSecretConfig) ServicePrincipalSecret
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetStatus()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
serviceprincipalsecret.ServicePrincipalSecret_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
serviceprincipalsecret.ServicePrincipalSecret_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ serviceprincipalsecret.ServicePrincipalSecret_IsTerraformElement(x interface{})
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
serviceprincipalsecret.ServicePrincipalSecret_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ serviceprincipalsecret.ServicePrincipalSecret_IsTerraformResource(x interface{})
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
serviceprincipalsecret.ServicePrincipalSecret_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -769,7 +776,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/serviceprincipalsecret"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/serviceprincipalsecret"
&serviceprincipalsecret.ServicePrincipalSecretConfig {
Connection: interface{},
diff --git a/docs/servicePrincipalSecret.java.md b/docs/servicePrincipalSecret.java.md
index 4b1e2e1ab..44439d05e 100644
--- a/docs/servicePrincipalSecret.java.md
+++ b/docs/servicePrincipalSecret.java.md
@@ -151,6 +151,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -225,6 +226,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/servicePrincipalSecret.python.md b/docs/servicePrincipalSecret.python.md
index 8174170f2..3b095b6b5 100644
--- a/docs/servicePrincipalSecret.python.md
+++ b/docs/servicePrincipalSecret.python.md
@@ -149,6 +149,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -228,6 +229,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/servicePrincipalSecret.typescript.md b/docs/servicePrincipalSecret.typescript.md
index 6492f9942..8c003a638 100644
--- a/docs/servicePrincipalSecret.typescript.md
+++ b/docs/servicePrincipalSecret.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/share.csharp.md b/docs/share.csharp.md
index 505028dcb..8b6ea2b51 100644
--- a/docs/share.csharp.md
+++ b/docs/share.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1356,6 +1363,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1363,6 +1371,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -2084,6 +2108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2091,6 +2116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
@@ -2529,6 +2570,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2536,6 +2578,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/share.go.md b/docs/share.go.md
index 59c1ae320..1054e85af 100644
--- a/docs/share.go.md
+++ b/docs/share.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShare(scope Construct, id *string, config ShareConfig) Share
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -419,7 +426,7 @@ func ResetOwner()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.Share_IsConstruct(x interface{}) *bool
```
@@ -451,7 +458,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.Share_IsTerraformElement(x interface{}) *bool
```
@@ -465,7 +472,7 @@ share.Share_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.Share_IsTerraformResource(x interface{}) *bool
```
@@ -479,7 +486,7 @@ share.Share_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.Share_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -840,7 +847,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
&share.ShareConfig {
Connection: interface{},
@@ -1031,7 +1038,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
&share.ShareObject {
DataObjectType: *string,
@@ -1205,7 +1212,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
&share.ShareObjectPartition {
Value: interface{},
@@ -1239,7 +1246,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
&share.ShareObjectPartitionValue {
Name: *string,
@@ -1315,7 +1322,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ShareObjectList
```
@@ -1356,6 +1363,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1363,6 +1371,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1458,7 +1482,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ShareObjectOutputReference
```
@@ -2043,7 +2067,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectPartitionList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ShareObjectPartitionList
```
@@ -2084,6 +2108,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2091,6 +2116,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2186,7 +2227,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectPartitionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ShareObjectPartitionOutputReference
```
@@ -2488,7 +2529,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectPartitionValueList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) ShareObjectPartitionValueList
```
@@ -2529,6 +2570,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2536,6 +2578,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2631,7 +2689,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/share"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/share"
share.NewShareObjectPartitionValueOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) ShareObjectPartitionValueOutputReference
```
diff --git a/docs/share.java.md b/docs/share.java.md
index f8545a8c8..48a377707 100644
--- a/docs/share.java.md
+++ b/docs/share.java.md
@@ -174,6 +174,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -251,6 +252,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1486,6 +1493,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1493,6 +1501,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2214,6 +2238,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2221,6 +2246,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
@@ -2659,6 +2700,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2666,6 +2708,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/share.python.md b/docs/share.python.md
index e8d5f735b..1600787d6 100644
--- a/docs/share.python.md
+++ b/docs/share.python.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -253,6 +254,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1527,6 +1534,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1534,6 +1542,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2292,6 +2318,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2299,6 +2326,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
@@ -2774,6 +2819,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2781,6 +2827,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/share.typescript.md b/docs/share.typescript.md
index 4e19002c3..224bf29dc 100644
--- a/docs/share.typescript.md
+++ b/docs/share.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1323,6 +1330,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1330,6 +1338,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -2051,6 +2075,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2058,6 +2083,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
@@ -2496,6 +2537,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2503,6 +2545,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/sqlAlert.csharp.md b/docs/sqlAlert.csharp.md
index 68829d726..f566c7a2f 100644
--- a/docs/sqlAlert.csharp.md
+++ b/docs/sqlAlert.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/sqlAlert.go.md b/docs/sqlAlert.go.md
index 08379e6eb..c65c211c9 100644
--- a/docs/sqlAlert.go.md
+++ b/docs/sqlAlert.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.NewSqlAlert(scope Construct, id *string, config SqlAlertConfig) SqlAlert
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -419,7 +426,7 @@ func ResetUpdatedAt()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.SqlAlert_IsConstruct(x interface{}) *bool
```
@@ -451,7 +458,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.SqlAlert_IsTerraformElement(x interface{}) *bool
```
@@ -465,7 +472,7 @@ sqlalert.SqlAlert_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.SqlAlert_IsTerraformResource(x interface{}) *bool
```
@@ -479,7 +486,7 @@ sqlalert.SqlAlert_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.SqlAlert_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -884,7 +891,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
&sqlalert.SqlAlertConfig {
Connection: interface{},
@@ -895,7 +902,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
Name: *string,
- Options: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlAlert.SqlAlertOptions,
+ Options: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlAlert.SqlAlertOptions,
QueryId: *string,
CreatedAt: *string,
Id: *string,
@@ -1103,7 +1110,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
&sqlalert.SqlAlertOptions {
Column: *string,
@@ -1221,7 +1228,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlalert"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlalert"
sqlalert.NewSqlAlertOptionsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlAlertOptionsOutputReference
```
diff --git a/docs/sqlAlert.java.md b/docs/sqlAlert.java.md
index 6a2ea50d9..1a19e3851 100644
--- a/docs/sqlAlert.java.md
+++ b/docs/sqlAlert.java.md
@@ -193,6 +193,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -270,6 +271,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/sqlAlert.python.md b/docs/sqlAlert.python.md
index b54f8ad9c..5fb3e02ec 100644
--- a/docs/sqlAlert.python.md
+++ b/docs/sqlAlert.python.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -273,6 +274,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/sqlAlert.typescript.md b/docs/sqlAlert.typescript.md
index 0d747c6de..ef974d4d1 100644
--- a/docs/sqlAlert.typescript.md
+++ b/docs/sqlAlert.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/sqlDashboard.csharp.md b/docs/sqlDashboard.csharp.md
index e6c32c45e..7f29791ff 100644
--- a/docs/sqlDashboard.csharp.md
+++ b/docs/sqlDashboard.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/sqlDashboard.go.md b/docs/sqlDashboard.go.md
index 6c28ade37..46f2881b3 100644
--- a/docs/sqlDashboard.go.md
+++ b/docs/sqlDashboard.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
sqldashboard.NewSqlDashboard(scope Construct, id *string, config SqlDashboardConfig) SqlDashboard
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -420,7 +427,7 @@ func ResetUpdatedAt()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
sqldashboard.SqlDashboard_IsConstruct(x interface{}) *bool
```
@@ -452,7 +459,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
sqldashboard.SqlDashboard_IsTerraformElement(x interface{}) *bool
```
@@ -466,7 +473,7 @@ sqldashboard.SqlDashboard_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
sqldashboard.SqlDashboard_IsTerraformResource(x interface{}) *bool
```
@@ -480,7 +487,7 @@ sqldashboard.SqlDashboard_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
sqldashboard.SqlDashboard_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -885,7 +892,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqldashboard"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqldashboard"
&sqldashboard.SqlDashboardConfig {
Connection: interface{},
diff --git a/docs/sqlDashboard.java.md b/docs/sqlDashboard.java.md
index 3c8c77439..5cd215f65 100644
--- a/docs/sqlDashboard.java.md
+++ b/docs/sqlDashboard.java.md
@@ -192,6 +192,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -270,6 +271,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/sqlDashboard.python.md b/docs/sqlDashboard.python.md
index 2edf9c905..32dfcd65a 100644
--- a/docs/sqlDashboard.python.md
+++ b/docs/sqlDashboard.python.md
@@ -189,6 +189,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -272,6 +273,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/sqlDashboard.typescript.md b/docs/sqlDashboard.typescript.md
index 0a3656ddc..c6b5bfbe3 100644
--- a/docs/sqlDashboard.typescript.md
+++ b/docs/sqlDashboard.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/sqlEndpoint.csharp.md b/docs/sqlEndpoint.csharp.md
index 52f18cc65..5bb8803d9 100644
--- a/docs/sqlEndpoint.csharp.md
+++ b/docs/sqlEndpoint.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -146,6 +147,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -2554,6 +2561,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2561,6 +2569,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/sqlEndpoint.go.md b/docs/sqlEndpoint.go.md
index 0135cc5a9..eefd61b33 100644
--- a/docs/sqlEndpoint.go.md
+++ b/docs/sqlEndpoint.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpoint(scope Construct, id *string, config SqlEndpointConfig) SqlEndpoint
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -146,6 +147,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -542,7 +549,7 @@ func ResetWarehouseType()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.SqlEndpoint_IsConstruct(x interface{}) *bool
```
@@ -574,7 +581,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.SqlEndpoint_IsTerraformElement(x interface{}) *bool
```
@@ -588,7 +595,7 @@ sqlendpoint.SqlEndpoint_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.SqlEndpoint_IsTerraformResource(x interface{}) *bool
```
@@ -602,7 +609,7 @@ sqlendpoint.SqlEndpoint_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.SqlEndpoint_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1249,7 +1256,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointChannel {
Name: *string,
@@ -1281,7 +1288,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointConfig {
Connection: interface{},
@@ -1294,7 +1301,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint
ClusterSize: *string,
Name: *string,
AutoStopMins: *f64,
- Channel: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlEndpoint.SqlEndpointChannel,
+ Channel: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlEndpoint.SqlEndpointChannel,
DataSourceId: *string,
EnablePhoton: interface{},
EnableServerlessCompute: interface{},
@@ -1304,11 +1311,11 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint
MaxNumClusters: *f64,
MinNumClusters: *f64,
NumClusters: *f64,
- OdbcParams: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlEndpoint.SqlEndpointOdbcParams,
+ OdbcParams: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlEndpoint.SqlEndpointOdbcParams,
SpotInstancePolicy: *string,
State: *string,
- Tags: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlEndpoint.SqlEndpointTags,
- Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlEndpoint.SqlEndpointTimeouts,
+ Tags: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlEndpoint.SqlEndpointTags,
+ Timeouts: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlEndpoint.SqlEndpointTimeouts,
WarehouseType: *string,
}
```
@@ -1660,7 +1667,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointOdbcParams {
Path: *string,
@@ -1748,7 +1755,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointTags {
CustomTags: interface{},
@@ -1782,7 +1789,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointTagsCustomTags {
Key: *string,
@@ -1828,7 +1835,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
&sqlendpoint.SqlEndpointTimeouts {
Create: *string,
@@ -1862,7 +1869,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointChannelOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlEndpointChannelOutputReference
```
@@ -2140,7 +2147,7 @@ func InternalValue() SqlEndpointChannel
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointOdbcParamsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlEndpointOdbcParamsOutputReference
```
@@ -2513,7 +2520,7 @@ func InternalValue() SqlEndpointOdbcParams
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointTagsCustomTagsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) SqlEndpointTagsCustomTagsList
```
@@ -2554,6 +2561,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -2561,6 +2569,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -2656,7 +2680,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointTagsCustomTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) SqlEndpointTagsCustomTagsOutputReference
```
@@ -2967,7 +2991,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointTagsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlEndpointTagsOutputReference
```
@@ -3251,7 +3275,7 @@ func InternalValue() SqlEndpointTags
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlendpoint"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlendpoint"
sqlendpoint.NewSqlEndpointTimeoutsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlEndpointTimeoutsOutputReference
```
diff --git a/docs/sqlEndpoint.java.md b/docs/sqlEndpoint.java.md
index 2a3f301ca..4e6c20309 100644
--- a/docs/sqlEndpoint.java.md
+++ b/docs/sqlEndpoint.java.md
@@ -311,6 +311,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -403,6 +404,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -2820,6 +2827,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2827,6 +2835,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/sqlEndpoint.python.md b/docs/sqlEndpoint.python.md
index a54bae574..c7e851a1f 100644
--- a/docs/sqlEndpoint.python.md
+++ b/docs/sqlEndpoint.python.md
@@ -307,6 +307,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -404,6 +405,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -2963,6 +2970,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -2970,6 +2978,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/sqlEndpoint.typescript.md b/docs/sqlEndpoint.typescript.md
index 13f5ba24d..41df18f50 100644
--- a/docs/sqlEndpoint.typescript.md
+++ b/docs/sqlEndpoint.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -146,6 +147,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -2512,6 +2519,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -2519,6 +2527,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/sqlGlobalConfig.csharp.md b/docs/sqlGlobalConfig.csharp.md
index eb0ec6528..16c0a8cf3 100644
--- a/docs/sqlGlobalConfig.csharp.md
+++ b/docs/sqlGlobalConfig.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/sqlGlobalConfig.go.md b/docs/sqlGlobalConfig.go.md
index f03e421e4..78f207af8 100644
--- a/docs/sqlGlobalConfig.go.md
+++ b/docs/sqlGlobalConfig.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
sqlglobalconfig.NewSqlGlobalConfig(scope Construct, id *string, config SqlGlobalConfigConfig) SqlGlobalConfig
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -420,7 +427,7 @@ func ResetSqlConfigParams()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
sqlglobalconfig.SqlGlobalConfig_IsConstruct(x interface{}) *bool
```
@@ -452,7 +459,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
sqlglobalconfig.SqlGlobalConfig_IsTerraformElement(x interface{}) *bool
```
@@ -466,7 +473,7 @@ sqlglobalconfig.SqlGlobalConfig_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
sqlglobalconfig.SqlGlobalConfig_IsTerraformResource(x interface{}) *bool
```
@@ -480,7 +487,7 @@ sqlglobalconfig.SqlGlobalConfig_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
sqlglobalconfig.SqlGlobalConfig_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -863,7 +870,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlglobalconfig"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlglobalconfig"
&sqlglobalconfig.SqlGlobalConfigConfig {
Connection: interface{},
diff --git a/docs/sqlGlobalConfig.java.md b/docs/sqlGlobalConfig.java.md
index 528e2d7c6..569f17eaf 100644
--- a/docs/sqlGlobalConfig.java.md
+++ b/docs/sqlGlobalConfig.java.md
@@ -182,6 +182,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -260,6 +261,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/sqlGlobalConfig.python.md b/docs/sqlGlobalConfig.python.md
index c9309d889..ef1e96dbb 100644
--- a/docs/sqlGlobalConfig.python.md
+++ b/docs/sqlGlobalConfig.python.md
@@ -179,6 +179,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -262,6 +263,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/sqlGlobalConfig.typescript.md b/docs/sqlGlobalConfig.typescript.md
index 8ef2846e9..679810e0c 100644
--- a/docs/sqlGlobalConfig.typescript.md
+++ b/docs/sqlGlobalConfig.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -132,6 +133,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/sqlPermissions.csharp.md b/docs/sqlPermissions.csharp.md
index a491254c3..3eaf80b60 100644
--- a/docs/sqlPermissions.csharp.md
+++ b/docs/sqlPermissions.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1256,6 +1263,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1263,6 +1271,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/sqlPermissions.go.md b/docs/sqlPermissions.go.md
index 7cec15a91..bf1c509ed 100644
--- a/docs/sqlPermissions.go.md
+++ b/docs/sqlPermissions.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.NewSqlPermissions(scope Construct, id *string, config SqlPermissionsConfig) SqlPermissions
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -447,7 +454,7 @@ func ResetView()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.SqlPermissions_IsConstruct(x interface{}) *bool
```
@@ -479,7 +486,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.SqlPermissions_IsTerraformElement(x interface{}) *bool
```
@@ -493,7 +500,7 @@ sqlpermissions.SqlPermissions_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.SqlPermissions_IsTerraformResource(x interface{}) *bool
```
@@ -507,7 +514,7 @@ sqlpermissions.SqlPermissions_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.SqlPermissions_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -934,7 +941,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
&sqlpermissions.SqlPermissionsConfig {
Connection: interface{},
@@ -1167,7 +1174,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
&sqlpermissions.SqlPermissionsPrivilegeAssignments {
Principal: *string,
@@ -1215,7 +1222,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.NewSqlPermissionsPrivilegeAssignmentsList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) SqlPermissionsPrivilegeAssignmentsList
```
@@ -1256,6 +1263,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1263,6 +1271,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1358,7 +1382,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlpermissions"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlpermissions"
sqlpermissions.NewSqlPermissionsPrivilegeAssignmentsOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) SqlPermissionsPrivilegeAssignmentsOutputReference
```
diff --git a/docs/sqlPermissions.java.md b/docs/sqlPermissions.java.md
index 32dd4ba2f..d1b9555c7 100644
--- a/docs/sqlPermissions.java.md
+++ b/docs/sqlPermissions.java.md
@@ -207,6 +207,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -288,6 +289,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1419,6 +1426,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1426,6 +1434,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/sqlPermissions.python.md b/docs/sqlPermissions.python.md
index e7d429df3..de543f4e9 100644
--- a/docs/sqlPermissions.python.md
+++ b/docs/sqlPermissions.python.md
@@ -201,6 +201,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -287,6 +288,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1457,6 +1464,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1464,6 +1472,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/sqlPermissions.typescript.md b/docs/sqlPermissions.typescript.md
index a08b455e9..c4543beef 100644
--- a/docs/sqlPermissions.typescript.md
+++ b/docs/sqlPermissions.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1236,6 +1243,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1243,6 +1251,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/sqlQuery.csharp.md b/docs/sqlQuery.csharp.md
index 6125702a4..842affcef 100644
--- a/docs/sqlQuery.csharp.md
+++ b/docs/sqlQuery.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -5851,6 +5858,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -5858,6 +5866,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/sqlQuery.go.md b/docs/sqlQuery.go.md
index d1e36bfb9..b8f70be0a 100644
--- a/docs/sqlQuery.go.md
+++ b/docs/sqlQuery.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQuery(scope Construct, id *string, config SqlQueryConfig) SqlQuery
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -460,7 +467,7 @@ func ResetUpdatedAt()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.SqlQuery_IsConstruct(x interface{}) *bool
```
@@ -492,7 +499,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.SqlQuery_IsTerraformElement(x interface{}) *bool
```
@@ -506,7 +513,7 @@ sqlquery.SqlQuery_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.SqlQuery_IsTerraformResource(x interface{}) *bool
```
@@ -520,7 +527,7 @@ sqlquery.SqlQuery_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.SqlQuery_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1013,7 +1020,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryConfig {
Connection: interface{},
@@ -1032,7 +1039,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
Parameter: interface{},
Parent: *string,
RunAsRole: *string,
- Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQuerySchedule,
+ Schedule: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQuerySchedule,
Tags: *[]*string,
UpdatedAt: *string,
}
@@ -1290,20 +1297,20 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameter {
Name: *string,
- Date: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDate,
- DateRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDateRange,
- Datetime: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetime,
- DatetimeRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetimeRange,
- Datetimesec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetimesec,
- DatetimesecRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetimesecRange,
- Enum: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterEnum,
- Number: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterNumber,
- Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterQuery,
- Text: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterText,
+ Date: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDate,
+ DateRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDateRange,
+ Datetime: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetime,
+ DatetimeRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetimeRange,
+ Datetimesec: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetimesec,
+ DatetimesecRange: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetimesecRange,
+ Enum: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterEnum,
+ Number: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterNumber,
+ Query: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterQuery,
+ Text: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterText,
Title: *string,
}
```
@@ -1496,7 +1503,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDate {
Value: *string,
@@ -1528,10 +1535,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDateRange {
- Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDateRangeRange,
+ Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDateRangeRange,
Value: *string,
}
```
@@ -1576,7 +1583,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDateRangeRange {
End: *string,
@@ -1622,7 +1629,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetime {
Value: *string,
@@ -1654,10 +1661,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetimeRange {
- Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetimeRangeRange,
+ Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetimeRangeRange,
Value: *string,
}
```
@@ -1702,7 +1709,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetimeRangeRange {
End: *string,
@@ -1748,7 +1755,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetimesec {
Value: *string,
@@ -1780,10 +1787,10 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetimesecRange {
- Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterDatetimesecRangeRange,
+ Range: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterDatetimesecRangeRange,
Value: *string,
}
```
@@ -1828,7 +1835,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterDatetimesecRangeRange {
End: *string,
@@ -1874,11 +1881,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterEnum {
Options: *[]*string,
- Multiple: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterEnumMultiple,
+ Multiple: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterEnumMultiple,
Value: *string,
Values: *[]*string,
}
@@ -1950,7 +1957,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterEnumMultiple {
Separator: *string,
@@ -2010,7 +2017,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterNumber {
Value: *f64,
@@ -2042,11 +2049,11 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterQuery {
QueryId: *string,
- Multiple: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryParameterQueryMultiple,
+ Multiple: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryParameterQueryMultiple,
Value: *string,
Values: *[]*string,
}
@@ -2118,7 +2125,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterQueryMultiple {
Separator: *string,
@@ -2178,7 +2185,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryParameterText {
Value: *string,
@@ -2210,12 +2217,12 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQuerySchedule {
- Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryScheduleContinuous,
- Daily: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryScheduleDaily,
- Weekly: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlQuery.SqlQueryScheduleWeekly,
+ Continuous: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryScheduleContinuous,
+ Daily: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryScheduleDaily,
+ Weekly: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlQuery.SqlQueryScheduleWeekly,
}
```
@@ -2276,7 +2283,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryScheduleContinuous {
IntervalSeconds: *f64,
@@ -2322,7 +2329,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryScheduleDaily {
IntervalDays: *f64,
@@ -2382,7 +2389,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
&sqlquery.SqlQueryScheduleWeekly {
DayOfWeek: *string,
@@ -2458,7 +2465,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDateOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDateOutputReference
```
@@ -2729,7 +2736,7 @@ func InternalValue() SqlQueryParameterDate
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDateRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDateRangeOutputReference
```
@@ -3049,7 +3056,7 @@ func InternalValue() SqlQueryParameterDateRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDateRangeRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDateRangeRangeOutputReference
```
@@ -3342,7 +3349,7 @@ func InternalValue() SqlQueryParameterDateRangeRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimeOutputReference
```
@@ -3613,7 +3620,7 @@ func InternalValue() SqlQueryParameterDatetime
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimeRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimeRangeOutputReference
```
@@ -3933,7 +3940,7 @@ func InternalValue() SqlQueryParameterDatetimeRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimeRangeRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimeRangeRangeOutputReference
```
@@ -4226,7 +4233,7 @@ func InternalValue() SqlQueryParameterDatetimeRangeRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimesecOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimesecOutputReference
```
@@ -4497,7 +4504,7 @@ func InternalValue() SqlQueryParameterDatetimesec
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimesecRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimesecRangeOutputReference
```
@@ -4817,7 +4824,7 @@ func InternalValue() SqlQueryParameterDatetimesecRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterDatetimesecRangeRangeOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterDatetimesecRangeRangeOutputReference
```
@@ -5110,7 +5117,7 @@ func InternalValue() SqlQueryParameterDatetimesecRangeRange
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterEnumMultipleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterEnumMultipleOutputReference
```
@@ -5439,7 +5446,7 @@ func InternalValue() SqlQueryParameterEnumMultiple
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterEnumOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterEnumOutputReference
```
@@ -5810,7 +5817,7 @@ func InternalValue() SqlQueryParameterEnum
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) SqlQueryParameterList
```
@@ -5851,6 +5858,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -5858,6 +5866,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -5953,7 +5977,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterNumberOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterNumberOutputReference
```
@@ -6224,7 +6248,7 @@ func InternalValue() SqlQueryParameterNumber
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) SqlQueryParameterOutputReference
```
@@ -6962,7 +6986,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterQueryMultipleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterQueryMultipleOutputReference
```
@@ -7291,7 +7315,7 @@ func InternalValue() SqlQueryParameterQueryMultiple
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterQueryOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterQueryOutputReference
```
@@ -7662,7 +7686,7 @@ func InternalValue() SqlQueryParameterQuery
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryParameterTextOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryParameterTextOutputReference
```
@@ -7933,7 +7957,7 @@ func InternalValue() SqlQueryParameterText
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryScheduleContinuousOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryScheduleContinuousOutputReference
```
@@ -8233,7 +8257,7 @@ func InternalValue() SqlQueryScheduleContinuous
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryScheduleDailyOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryScheduleDailyOutputReference
```
@@ -8555,7 +8579,7 @@ func InternalValue() SqlQueryScheduleDaily
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryScheduleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryScheduleOutputReference
```
@@ -8930,7 +8954,7 @@ func InternalValue() SqlQuerySchedule
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlquery"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlquery"
sqlquery.NewSqlQueryScheduleWeeklyOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlQueryScheduleWeeklyOutputReference
```
diff --git a/docs/sqlQuery.java.md b/docs/sqlQuery.java.md
index 0e36bc2de..2d41492c4 100644
--- a/docs/sqlQuery.java.md
+++ b/docs/sqlQuery.java.md
@@ -236,6 +236,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -318,6 +319,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -6040,6 +6047,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -6047,6 +6055,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/sqlQuery.python.md b/docs/sqlQuery.python.md
index 0413e1526..3bbc5fa21 100644
--- a/docs/sqlQuery.python.md
+++ b/docs/sqlQuery.python.md
@@ -233,6 +233,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -320,6 +321,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -6448,6 +6455,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -6455,6 +6463,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/sqlQuery.typescript.md b/docs/sqlQuery.typescript.md
index d453e634c..f37b25dbc 100644
--- a/docs/sqlQuery.typescript.md
+++ b/docs/sqlQuery.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -136,6 +137,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -5756,6 +5763,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -5763,6 +5771,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/sqlTable.csharp.md b/docs/sqlTable.csharp.md
index 4883cb70e..190f6cef1 100644
--- a/docs/sqlTable.csharp.md
+++ b/docs/sqlTable.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1600,6 +1607,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1607,6 +1615,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/sqlTable.go.md b/docs/sqlTable.go.md
index 0e74dd5c9..6a44fd5ce 100644
--- a/docs/sqlTable.go.md
+++ b/docs/sqlTable.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.NewSqlTable(scope Construct, id *string, config SqlTableConfig) SqlTable
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -475,7 +482,7 @@ func ResetWarehouseId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.SqlTable_IsConstruct(x interface{}) *bool
```
@@ -507,7 +514,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.SqlTable_IsTerraformElement(x interface{}) *bool
```
@@ -521,7 +528,7 @@ sqltable.SqlTable_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.SqlTable_IsTerraformResource(x interface{}) *bool
```
@@ -535,7 +542,7 @@ sqltable.SqlTable_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.SqlTable_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1138,7 +1145,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
&sqltable.SqlTableColumn {
Name: *string,
@@ -1212,7 +1219,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
&sqltable.SqlTableConfig {
Connection: interface{},
@@ -1559,7 +1566,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.NewSqlTableColumnList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) SqlTableColumnList
```
@@ -1600,6 +1607,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1607,6 +1615,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1702,7 +1726,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqltable"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqltable"
sqltable.NewSqlTableColumnOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) SqlTableColumnOutputReference
```
diff --git a/docs/sqlTable.java.md b/docs/sqlTable.java.md
index 797453843..5f3ea542d 100644
--- a/docs/sqlTable.java.md
+++ b/docs/sqlTable.java.md
@@ -284,6 +284,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -369,6 +370,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1838,6 +1845,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1845,6 +1853,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/sqlTable.python.md b/docs/sqlTable.python.md
index ae6e889fe..cbe06309b 100644
--- a/docs/sqlTable.python.md
+++ b/docs/sqlTable.python.md
@@ -281,6 +281,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -371,6 +372,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1881,6 +1888,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1888,6 +1896,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/sqlTable.typescript.md b/docs/sqlTable.typescript.md
index a7ec3b075..c243435c2 100644
--- a/docs/sqlTable.typescript.md
+++ b/docs/sqlTable.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -139,6 +140,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1570,6 +1577,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1577,6 +1585,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/sqlVisualization.csharp.md b/docs/sqlVisualization.csharp.md
index d15e730d1..2bf5cbf18 100644
--- a/docs/sqlVisualization.csharp.md
+++ b/docs/sqlVisualization.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/sqlVisualization.go.md b/docs/sqlVisualization.go.md
index 37e9ac1e8..ce985aaf7 100644
--- a/docs/sqlVisualization.go.md
+++ b/docs/sqlVisualization.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
sqlvisualization.NewSqlVisualization(scope Construct, id *string, config SqlVisualizationConfig) SqlVisualization
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetVisualizationId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
sqlvisualization.SqlVisualization_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
sqlvisualization.SqlVisualization_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ sqlvisualization.SqlVisualization_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
sqlvisualization.SqlVisualization_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ sqlvisualization.SqlVisualization_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
sqlvisualization.SqlVisualization_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -864,7 +871,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlvisualization"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlvisualization"
&sqlvisualization.SqlVisualizationConfig {
Connection: interface{},
diff --git a/docs/sqlVisualization.java.md b/docs/sqlVisualization.java.md
index 1567476a3..a804f1ad5 100644
--- a/docs/sqlVisualization.java.md
+++ b/docs/sqlVisualization.java.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -266,6 +267,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/sqlVisualization.python.md b/docs/sqlVisualization.python.md
index d9732d939..ee18263ad 100644
--- a/docs/sqlVisualization.python.md
+++ b/docs/sqlVisualization.python.md
@@ -189,6 +189,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -269,6 +270,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/sqlVisualization.typescript.md b/docs/sqlVisualization.typescript.md
index 0140b0b0f..cb00ef357 100644
--- a/docs/sqlVisualization.typescript.md
+++ b/docs/sqlVisualization.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/sqlWidget.csharp.md b/docs/sqlWidget.csharp.md
index d5523fa7d..390a06bee 100644
--- a/docs/sqlWidget.csharp.md
+++ b/docs/sqlWidget.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1408,6 +1415,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1415,6 +1423,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/sqlWidget.go.md b/docs/sqlWidget.go.md
index a674d2b5d..e13c539b6 100644
--- a/docs/sqlWidget.go.md
+++ b/docs/sqlWidget.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.NewSqlWidget(scope Construct, id *string, config SqlWidgetConfig) SqlWidget
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -453,7 +460,7 @@ func ResetWidgetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.SqlWidget_IsConstruct(x interface{}) *bool
```
@@ -485,7 +492,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.SqlWidget_IsTerraformElement(x interface{}) *bool
```
@@ -499,7 +506,7 @@ sqlwidget.SqlWidget_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.SqlWidget_IsTerraformResource(x interface{}) *bool
```
@@ -513,7 +520,7 @@ sqlwidget.SqlWidget_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.SqlWidget_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -940,7 +947,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
&sqlwidget.SqlWidgetConfig {
Connection: interface{},
@@ -954,7 +961,7 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
Description: *string,
Id: *string,
Parameter: interface{},
- Position: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.sqlWidget.SqlWidgetPosition,
+ Position: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.sqlWidget.SqlWidgetPosition,
Text: *string,
Title: *string,
VisualizationId: *string,
@@ -1175,7 +1182,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
&sqlwidget.SqlWidgetParameter {
Name: *string,
@@ -1277,7 +1284,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
&sqlwidget.SqlWidgetPosition {
SizeX: *f64,
@@ -1367,7 +1374,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.NewSqlWidgetParameterList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) SqlWidgetParameterList
```
@@ -1408,6 +1415,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1415,6 +1423,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1510,7 +1534,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.NewSqlWidgetParameterOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) SqlWidgetParameterOutputReference
```
@@ -1937,7 +1961,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/sqlwidget"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/sqlwidget"
sqlwidget.NewSqlWidgetPositionOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) SqlWidgetPositionOutputReference
```
diff --git a/docs/sqlWidget.java.md b/docs/sqlWidget.java.md
index 102f4ee73..6b08026d8 100644
--- a/docs/sqlWidget.java.md
+++ b/docs/sqlWidget.java.md
@@ -206,6 +206,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -287,6 +288,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1568,6 +1575,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1575,6 +1583,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/sqlWidget.python.md b/docs/sqlWidget.python.md
index 4bd45696d..5b3cff37c 100644
--- a/docs/sqlWidget.python.md
+++ b/docs/sqlWidget.python.md
@@ -203,6 +203,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -289,6 +290,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1651,6 +1658,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1658,6 +1666,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/sqlWidget.typescript.md b/docs/sqlWidget.typescript.md
index 8a735ce94..406d2a2a8 100644
--- a/docs/sqlWidget.typescript.md
+++ b/docs/sqlWidget.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -135,6 +136,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1378,6 +1385,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1385,6 +1393,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/storageCredential.csharp.md b/docs/storageCredential.csharp.md
index e580fbf75..a7bf05faa 100644
--- a/docs/storageCredential.csharp.md
+++ b/docs/storageCredential.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/storageCredential.go.md b/docs/storageCredential.go.md
index 943b4a4ef..8f14c1236 100644
--- a/docs/storageCredential.go.md
+++ b/docs/storageCredential.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredential(scope Construct, id *string, config StorageCredentialConfig) StorageCredential
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -520,7 +527,7 @@ func ResetReadOnly()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.StorageCredential_IsConstruct(x interface{}) *bool
```
@@ -552,7 +559,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.StorageCredential_IsTerraformElement(x interface{}) *bool
```
@@ -566,7 +573,7 @@ storagecredential.StorageCredential_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.StorageCredential_IsTerraformResource(x interface{}) *bool
```
@@ -580,7 +587,7 @@ storagecredential.StorageCredential_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.StorageCredential_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1095,7 +1102,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialAwsIamRole {
RoleArn: *string,
@@ -1155,7 +1162,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialAzureManagedIdentity {
AccessConnectorId: *string,
@@ -1215,7 +1222,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialAzureServicePrincipal {
ApplicationId: *string,
@@ -1275,7 +1282,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialConfig {
Connection: interface{},
@@ -1286,14 +1293,14 @@ import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecred
Provider: github.com/hashicorp/terraform-cdk-go/cdktf.TerraformProvider,
Provisioners: *[]interface{},
Name: *string,
- AwsIamRole: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.storageCredential.StorageCredentialAwsIamRole,
- AzureManagedIdentity: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.storageCredential.StorageCredentialAzureManagedIdentity,
- AzureServicePrincipal: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.storageCredential.StorageCredentialAzureServicePrincipal,
+ AwsIamRole: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.storageCredential.StorageCredentialAwsIamRole,
+ AzureManagedIdentity: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.storageCredential.StorageCredentialAzureManagedIdentity,
+ AzureServicePrincipal: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.storageCredential.StorageCredentialAzureServicePrincipal,
Comment: *string,
- DatabricksGcpServiceAccount: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.storageCredential.StorageCredentialDatabricksGcpServiceAccount,
+ DatabricksGcpServiceAccount: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.storageCredential.StorageCredentialDatabricksGcpServiceAccount,
ForceDestroy: interface{},
ForceUpdate: interface{},
- GcpServiceAccountKey: github.com/cdktf/cdktf-provider-databricks-go/databricks/v12.storageCredential.StorageCredentialGcpServiceAccountKey,
+ GcpServiceAccountKey: github.com/cdktf/cdktf-provider-databricks-go/databricks/v13.storageCredential.StorageCredentialGcpServiceAccountKey,
Id: *string,
MetastoreId: *string,
Owner: *string,
@@ -1572,7 +1579,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialDatabricksGcpServiceAccount {
CredentialId: *string,
@@ -1618,7 +1625,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
&storagecredential.StorageCredentialGcpServiceAccountKey {
Email: *string,
@@ -1680,7 +1687,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredentialAwsIamRoleOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) StorageCredentialAwsIamRoleOutputReference
```
@@ -2009,7 +2016,7 @@ func InternalValue() StorageCredentialAwsIamRole
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredentialAzureManagedIdentityOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) StorageCredentialAzureManagedIdentityOutputReference
```
@@ -2338,7 +2345,7 @@ func InternalValue() StorageCredentialAzureManagedIdentity
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredentialAzureServicePrincipalOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) StorageCredentialAzureServicePrincipalOutputReference
```
@@ -2653,7 +2660,7 @@ func InternalValue() StorageCredentialAzureServicePrincipal
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredentialDatabricksGcpServiceAccountOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) StorageCredentialDatabricksGcpServiceAccountOutputReference
```
@@ -2960,7 +2967,7 @@ func InternalValue() StorageCredentialDatabricksGcpServiceAccount
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/storagecredential"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/storagecredential"
storagecredential.NewStorageCredentialGcpServiceAccountKeyOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string) StorageCredentialGcpServiceAccountKeyOutputReference
```
diff --git a/docs/storageCredential.java.md b/docs/storageCredential.java.md
index f3a25095a..f03be6e74 100644
--- a/docs/storageCredential.java.md
+++ b/docs/storageCredential.java.md
@@ -254,6 +254,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -342,6 +343,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/storageCredential.python.md b/docs/storageCredential.python.md
index ec69b9e2a..663a1b704 100644
--- a/docs/storageCredential.python.md
+++ b/docs/storageCredential.python.md
@@ -249,6 +249,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -342,6 +343,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/storageCredential.typescript.md b/docs/storageCredential.typescript.md
index 2588fdf42..ca9fc1a18 100644
--- a/docs/storageCredential.typescript.md
+++ b/docs/storageCredential.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -142,6 +143,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/systemSchema.csharp.md b/docs/systemSchema.csharp.md
index 740af4668..0ae86339c 100644
--- a/docs/systemSchema.csharp.md
+++ b/docs/systemSchema.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/systemSchema.go.md b/docs/systemSchema.go.md
index 6555d08d2..7564683fd 100644
--- a/docs/systemSchema.go.md
+++ b/docs/systemSchema.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
systemschema.NewSystemSchema(scope Construct, id *string, config SystemSchemaConfig) SystemSchema
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -392,7 +399,7 @@ func ResetState()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
systemschema.SystemSchema_IsConstruct(x interface{}) *bool
```
@@ -424,7 +431,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
systemschema.SystemSchema_IsTerraformElement(x interface{}) *bool
```
@@ -438,7 +445,7 @@ systemschema.SystemSchema_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
systemschema.SystemSchema_IsTerraformResource(x interface{}) *bool
```
@@ -452,7 +459,7 @@ systemschema.SystemSchema_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
systemschema.SystemSchema_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -758,7 +765,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/systemschema"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/systemschema"
&systemschema.SystemSchemaConfig {
Connection: interface{},
diff --git a/docs/systemSchema.java.md b/docs/systemSchema.java.md
index 1c3eda0e0..23065e990 100644
--- a/docs/systemSchema.java.md
+++ b/docs/systemSchema.java.md
@@ -141,6 +141,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -215,6 +216,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/systemSchema.python.md b/docs/systemSchema.python.md
index fe1256cce..e0c4eeacf 100644
--- a/docs/systemSchema.python.md
+++ b/docs/systemSchema.python.md
@@ -139,6 +139,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -218,6 +219,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/systemSchema.typescript.md b/docs/systemSchema.typescript.md
index 66c8d33f4..1ac734213 100644
--- a/docs/systemSchema.typescript.md
+++ b/docs/systemSchema.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -128,6 +129,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/table.csharp.md b/docs/table.csharp.md
index 5b5ae7719..66d231dbf 100644
--- a/docs/table.csharp.md
+++ b/docs/table.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
@@ -1512,6 +1519,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1519,6 +1527,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```csharp
+private DynamicListTerraformIterator AllWithMapKey(string MapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `MapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `ComputeFqn`
```csharp
diff --git a/docs/table.go.md b/docs/table.go.md
index 38e4def58..ecbb1eb45 100644
--- a/docs/table.go.md
+++ b/docs/table.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.NewTable(scope Construct, id *string, config TableConfig) Table
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -433,7 +440,7 @@ func ResetViewDefinition()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.Table_IsConstruct(x interface{}) *bool
```
@@ -465,7 +472,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.Table_IsTerraformElement(x interface{}) *bool
```
@@ -479,7 +486,7 @@ table.Table_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.Table_IsTerraformResource(x interface{}) *bool
```
@@ -493,7 +500,7 @@ table.Table_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.Table_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1008,7 +1015,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
&table.TableColumn {
Name: *string,
@@ -1180,7 +1187,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
&table.TableConfig {
Connection: interface{},
@@ -1471,7 +1478,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.NewTableColumnList(terraformResource IInterpolatingParent, terraformAttribute *string, wrapsSet *bool) TableColumnList
```
@@ -1512,6 +1519,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| AllWithMapKey
| Creating an iterator for this complex list. |
| ComputeFqn
| *No description.* |
| Resolve
| Produce the Token's value at resolution time. |
| ToString
| Return a string representation of this resolvable object. |
@@ -1519,6 +1527,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `AllWithMapKey`
+
+```go
+func AllWithMapKey(mapKeyAttributeName *string) DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* *string
+
+---
+
##### `ComputeFqn`
```go
@@ -1614,7 +1638,7 @@ func InternalValue() interface{}
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/table"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/table"
table.NewTableColumnOutputReference(terraformResource IInterpolatingParent, terraformAttribute *string, complexObjectIndex *f64, complexObjectIsFromSet *bool) TableColumnOutputReference
```
diff --git a/docs/table.java.md b/docs/table.java.md
index 86763efa1..fb193d5d9 100644
--- a/docs/table.java.md
+++ b/docs/table.java.md
@@ -244,6 +244,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -323,6 +324,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
@@ -1710,6 +1717,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1717,6 +1725,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```java
+public DynamicListTerraformIterator allWithMapKey(java.lang.String mapKeyAttributeName)
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* java.lang.String
+
+---
+
##### `computeFqn`
```java
diff --git a/docs/table.python.md b/docs/table.python.md
index 3b26c7c1d..0e3b4ab25 100644
--- a/docs/table.python.md
+++ b/docs/table.python.md
@@ -241,6 +241,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -325,6 +326,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
@@ -1753,6 +1760,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| all_with_map_key
| Creating an iterator for this complex list. |
| compute_fqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| to_string
| Return a string representation of this resolvable object. |
@@ -1760,6 +1768,24 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `all_with_map_key`
+
+```python
+def all_with_map_key(
+ map_key_attribute_name: str
+) -> DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `map_key_attribute_name`Required
+
+- *Type:* str
+
+---
+
##### `compute_fqn`
```python
diff --git a/docs/table.typescript.md b/docs/table.typescript.md
index 9b909631d..e07a9e701 100644
--- a/docs/table.typescript.md
+++ b/docs/table.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -133,6 +134,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
@@ -1479,6 +1486,7 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
| **Name** | **Description** |
| --- | --- |
+| allWithMapKey
| Creating an iterator for this complex list. |
| computeFqn
| *No description.* |
| resolve
| Produce the Token's value at resolution time. |
| toString
| Return a string representation of this resolvable object. |
@@ -1486,6 +1494,22 @@ whether the list is wrapping a set (will add tolist() to be able to access an it
---
+##### `allWithMapKey`
+
+```typescript
+public allWithMapKey(mapKeyAttributeName: string): DynamicListTerraformIterator
+```
+
+Creating an iterator for this complex list.
+
+The list will be converted into a map with the mapKeyAttributeName as the key.
+
+###### `mapKeyAttributeName`Required
+
+- *Type:* string
+
+---
+
##### `computeFqn`
```typescript
diff --git a/docs/token.csharp.md b/docs/token.csharp.md
index 4060e30ba..a82993330 100644
--- a/docs/token.csharp.md
+++ b/docs/token.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/token.go.md b/docs/token.go.md
index 37ef22b33..e52019900 100644
--- a/docs/token.go.md
+++ b/docs/token.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
token.NewToken(scope Construct, id *string, config TokenConfig) Token
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -413,7 +420,7 @@ func ResetTokenId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
token.Token_IsConstruct(x interface{}) *bool
```
@@ -445,7 +452,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
token.Token_IsTerraformElement(x interface{}) *bool
```
@@ -459,7 +466,7 @@ token.Token_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
token.Token_IsTerraformResource(x interface{}) *bool
```
@@ -473,7 +480,7 @@ token.Token_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
token.Token_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -845,7 +852,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/token"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/token"
&token.TokenConfig {
Connection: interface{},
diff --git a/docs/token.java.md b/docs/token.java.md
index bfba18683..483e51950 100644
--- a/docs/token.java.md
+++ b/docs/token.java.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -248,6 +249,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/token.python.md b/docs/token.python.md
index 8100d00c0..3fd389502 100644
--- a/docs/token.python.md
+++ b/docs/token.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -251,6 +252,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/token.typescript.md b/docs/token.typescript.md
index 8ce23767e..5ed293667 100644
--- a/docs/token.typescript.md
+++ b/docs/token.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -131,6 +132,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/user.csharp.md b/docs/user.csharp.md
index 7d5e08981..3d62f2d71 100644
--- a/docs/user.csharp.md
+++ b/docs/user.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/user.go.md b/docs/user.go.md
index 862c32cb1..2a53ece68 100644
--- a/docs/user.go.md
+++ b/docs/user.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
user.NewUser(scope Construct, id *string, config UserConfig) User
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -476,7 +483,7 @@ func ResetWorkspaceAccess()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
user.User_IsConstruct(x interface{}) *bool
```
@@ -508,7 +515,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
user.User_IsTerraformElement(x interface{}) *bool
```
@@ -522,7 +529,7 @@ user.User_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
user.User_IsTerraformResource(x interface{}) *bool
```
@@ -536,7 +543,7 @@ user.User_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
user.User_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -1117,7 +1124,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/user"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/user"
&user.UserConfig {
Connection: interface{},
diff --git a/docs/user.java.md b/docs/user.java.md
index 021ca8dab..59ffb936a 100644
--- a/docs/user.java.md
+++ b/docs/user.java.md
@@ -280,6 +280,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -366,6 +367,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/user.python.md b/docs/user.python.md
index fd854c9de..86af94239 100644
--- a/docs/user.python.md
+++ b/docs/user.python.md
@@ -269,6 +269,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -360,6 +361,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/user.typescript.md b/docs/user.typescript.md
index f5eac3054..acec3663d 100644
--- a/docs/user.typescript.md
+++ b/docs/user.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -140,6 +141,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/userInstanceProfile.csharp.md b/docs/userInstanceProfile.csharp.md
index 601ff9424..f119b96c3 100644
--- a/docs/userInstanceProfile.csharp.md
+++ b/docs/userInstanceProfile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/userInstanceProfile.go.md b/docs/userInstanceProfile.go.md
index 558761a51..144134092 100644
--- a/docs/userInstanceProfile.go.md
+++ b/docs/userInstanceProfile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
userinstanceprofile.NewUserInstanceProfile(scope Construct, id *string, config UserInstanceProfileConfig) UserInstanceProfile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
userinstanceprofile.UserInstanceProfile_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
userinstanceprofile.UserInstanceProfile_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ userinstanceprofile.UserInstanceProfile_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
userinstanceprofile.UserInstanceProfile_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ userinstanceprofile.UserInstanceProfile_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
userinstanceprofile.UserInstanceProfile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userinstanceprofile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userinstanceprofile"
&userinstanceprofile.UserInstanceProfileConfig {
Connection: interface{},
diff --git a/docs/userInstanceProfile.java.md b/docs/userInstanceProfile.java.md
index 3b06e4060..7fa206000 100644
--- a/docs/userInstanceProfile.java.md
+++ b/docs/userInstanceProfile.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/userInstanceProfile.python.md b/docs/userInstanceProfile.python.md
index 13f62ccc9..33f79571f 100644
--- a/docs/userInstanceProfile.python.md
+++ b/docs/userInstanceProfile.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/userInstanceProfile.typescript.md b/docs/userInstanceProfile.typescript.md
index b0b6358cf..dc1394305 100644
--- a/docs/userInstanceProfile.typescript.md
+++ b/docs/userInstanceProfile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/userRole.csharp.md b/docs/userRole.csharp.md
index cb3f271ea..a6c82e305 100644
--- a/docs/userRole.csharp.md
+++ b/docs/userRole.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/userRole.go.md b/docs/userRole.go.md
index 7c37d0418..f1902f388 100644
--- a/docs/userRole.go.md
+++ b/docs/userRole.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
userrole.NewUserRole(scope Construct, id *string, config UserRoleConfig) UserRole
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -378,7 +385,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
userrole.UserRole_IsConstruct(x interface{}) *bool
```
@@ -410,7 +417,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
userrole.UserRole_IsTerraformElement(x interface{}) *bool
```
@@ -424,7 +431,7 @@ userrole.UserRole_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
userrole.UserRole_IsTerraformResource(x interface{}) *bool
```
@@ -438,7 +445,7 @@ userrole.UserRole_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
userrole.UserRole_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -733,7 +740,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/userrole"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/userrole"
&userrole.UserRoleConfig {
Connection: interface{},
diff --git a/docs/userRole.java.md b/docs/userRole.java.md
index 9ed3ad75d..442cc63a7 100644
--- a/docs/userRole.java.md
+++ b/docs/userRole.java.md
@@ -141,6 +141,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -213,6 +214,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/userRole.python.md b/docs/userRole.python.md
index 9e4e2a92b..b6f6b6d9a 100644
--- a/docs/userRole.python.md
+++ b/docs/userRole.python.md
@@ -139,6 +139,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -216,6 +217,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/userRole.typescript.md b/docs/userRole.typescript.md
index 9c188b173..9852b4ff3 100644
--- a/docs/userRole.typescript.md
+++ b/docs/userRole.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -126,6 +127,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/volume.csharp.md b/docs/volume.csharp.md
index 556987e14..4543818ab 100644
--- a/docs/volume.csharp.md
+++ b/docs/volume.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/volume.go.md b/docs/volume.go.md
index e8d16a8b3..dcfedf477 100644
--- a/docs/volume.go.md
+++ b/docs/volume.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
volume.NewVolume(scope Construct, id *string, config VolumeConfig) Volume
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -399,7 +406,7 @@ func ResetStorageLocation()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
volume.Volume_IsConstruct(x interface{}) *bool
```
@@ -431,7 +438,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
volume.Volume_IsTerraformElement(x interface{}) *bool
```
@@ -445,7 +452,7 @@ volume.Volume_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
volume.Volume_IsTerraformResource(x interface{}) *bool
```
@@ -459,7 +466,7 @@ volume.Volume_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
volume.Volume_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -864,7 +871,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/volume"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/volume"
&volume.VolumeConfig {
Connection: interface{},
diff --git a/docs/volume.java.md b/docs/volume.java.md
index 2736b2309..3f6a4de28 100644
--- a/docs/volume.java.md
+++ b/docs/volume.java.md
@@ -191,6 +191,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -266,6 +267,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/volume.python.md b/docs/volume.python.md
index fdd7ff671..0e93aac93 100644
--- a/docs/volume.python.md
+++ b/docs/volume.python.md
@@ -189,6 +189,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -269,6 +270,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/volume.typescript.md b/docs/volume.typescript.md
index 09ce00bf4..1c77a8f35 100644
--- a/docs/volume.typescript.md
+++ b/docs/volume.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -129,6 +130,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/workspaceConf.csharp.md b/docs/workspaceConf.csharp.md
index 91a3f7220..c2d69ea88 100644
--- a/docs/workspaceConf.csharp.md
+++ b/docs/workspaceConf.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/workspaceConf.go.md b/docs/workspaceConf.go.md
index 87bcf5b74..ae3d897a8 100644
--- a/docs/workspaceConf.go.md
+++ b/docs/workspaceConf.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
workspaceconf.NewWorkspaceConf(scope Construct, id *string, config WorkspaceConfConfig) WorkspaceConf
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -385,7 +392,7 @@ func ResetId()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
workspaceconf.WorkspaceConf_IsConstruct(x interface{}) *bool
```
@@ -417,7 +424,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
workspaceconf.WorkspaceConf_IsTerraformElement(x interface{}) *bool
```
@@ -431,7 +438,7 @@ workspaceconf.WorkspaceConf_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
workspaceconf.WorkspaceConf_IsTerraformResource(x interface{}) *bool
```
@@ -445,7 +452,7 @@ workspaceconf.WorkspaceConf_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
workspaceconf.WorkspaceConf_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -718,7 +725,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspaceconf"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspaceconf"
&workspaceconf.WorkspaceConfConfig {
Connection: interface{},
diff --git a/docs/workspaceConf.java.md b/docs/workspaceConf.java.md
index adcd5c719..b8333e0ab 100644
--- a/docs/workspaceConf.java.md
+++ b/docs/workspaceConf.java.md
@@ -131,6 +131,7 @@ If you experience problems setting this value it might not be settable. Please t
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -204,6 +205,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/workspaceConf.python.md b/docs/workspaceConf.python.md
index cf4b0aaaf..32bb01965 100644
--- a/docs/workspaceConf.python.md
+++ b/docs/workspaceConf.python.md
@@ -129,6 +129,7 @@ If you experience problems setting this value it might not be settable. Please t
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -207,6 +208,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/workspaceConf.typescript.md b/docs/workspaceConf.typescript.md
index 436fa04cd..f47b1f38a 100644
--- a/docs/workspaceConf.typescript.md
+++ b/docs/workspaceConf.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -127,6 +128,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/docs/workspaceFile.csharp.md b/docs/workspaceFile.csharp.md
index 8db6ab270..0f722db80 100644
--- a/docs/workspaceFile.csharp.md
+++ b/docs/workspaceFile.csharp.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ private void ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```csharp
+private object ToHclTerraform()
+```
+
##### `ToMetadata`
```csharp
diff --git a/docs/workspaceFile.go.md b/docs/workspaceFile.go.md
index 5f367c8e4..a0aa6241b 100644
--- a/docs/workspaceFile.go.md
+++ b/docs/workspaceFile.go.md
@@ -9,7 +9,7 @@ Represents a {@link https://registry.terraform.io/providers/databricks/databrick
#### Initializers
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
workspacefile.NewWorkspaceFile(scope Construct, id *string, config WorkspaceFileConfig) WorkspaceFile
```
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| AddOverride
| *No description.* |
| OverrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| ResetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| ToHclTerraform
| *No description.* |
| ToMetadata
| *No description.* |
| ToTerraform
| Adds this resource to the terraform JSON output. |
| AddMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ func ResetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `ToHclTerraform`
+
+```go
+func ToHclTerraform() interface{}
+```
+
##### `ToMetadata`
```go
@@ -406,7 +413,7 @@ func ResetSource()
##### `IsConstruct`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
workspacefile.WorkspaceFile_IsConstruct(x interface{}) *bool
```
@@ -438,7 +445,7 @@ Any object.
##### `IsTerraformElement`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
workspacefile.WorkspaceFile_IsTerraformElement(x interface{}) *bool
```
@@ -452,7 +459,7 @@ workspacefile.WorkspaceFile_IsTerraformElement(x interface{}) *bool
##### `IsTerraformResource`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
workspacefile.WorkspaceFile_IsTerraformResource(x interface{}) *bool
```
@@ -466,7 +473,7 @@ workspacefile.WorkspaceFile_IsTerraformResource(x interface{}) *bool
##### `GenerateConfigForImport`
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
workspacefile.WorkspaceFile_GenerateConfigForImport(scope Construct, importToId *string, importFromId *string, provider TerraformProvider) ImportableResource
```
@@ -849,7 +856,7 @@ func TfResourceType() *string
#### Initializer
```go
-import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v12/workspacefile"
+import "github.com/cdktf/cdktf-provider-databricks-go/databricks/v13/workspacefile"
&workspacefile.WorkspaceFileConfig {
Connection: interface{},
diff --git a/docs/workspaceFile.java.md b/docs/workspaceFile.java.md
index f7d3be4d1..2a3b47738 100644
--- a/docs/workspaceFile.java.md
+++ b/docs/workspaceFile.java.md
@@ -171,6 +171,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -247,6 +248,12 @@ public void resetOverrideLogicalId()
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```java
+public java.lang.Object toHclTerraform()
+```
+
##### `toMetadata`
```java
diff --git a/docs/workspaceFile.python.md b/docs/workspaceFile.python.md
index e87abcb49..d7307d269 100644
--- a/docs/workspaceFile.python.md
+++ b/docs/workspaceFile.python.md
@@ -169,6 +169,7 @@ Docs at Terraform Registry: {@link https://registry.terraform.io/providers/datab
| add_override
| *No description.* |
| override_logical_id
| Overrides the auto-generated logical ID with a specific ID. |
| reset_override_logical_id
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| to_hcl_terraform
| *No description.* |
| to_metadata
| *No description.* |
| to_terraform
| Adds this resource to the terraform JSON output. |
| add_move_target
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -250,6 +251,12 @@ def reset_override_logical_id() -> None
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `to_hcl_terraform`
+
+```python
+def to_hcl_terraform() -> typing.Any
+```
+
##### `to_metadata`
```python
diff --git a/docs/workspaceFile.typescript.md b/docs/workspaceFile.typescript.md
index 95867e605..0d649eeb1 100644
--- a/docs/workspaceFile.typescript.md
+++ b/docs/workspaceFile.typescript.md
@@ -54,6 +54,7 @@ Must be unique amongst siblings in the same scope
| addOverride
| *No description.* |
| overrideLogicalId
| Overrides the auto-generated logical ID with a specific ID. |
| resetOverrideLogicalId
| Resets a previously passed logical Id to use the auto-generated logical id again. |
+| toHclTerraform
| *No description.* |
| toMetadata
| *No description.* |
| toTerraform
| Adds this resource to the terraform JSON output. |
| addMoveTarget
| Adds a user defined moveTarget string to this resource to be later used in .moveTo(moveTarget) to resolve the location of the move. |
@@ -130,6 +131,12 @@ public resetOverrideLogicalId(): void
Resets a previously passed logical Id to use the auto-generated logical id again.
+##### `toHclTerraform`
+
+```typescript
+public toHclTerraform(): any
+```
+
##### `toMetadata`
```typescript
diff --git a/package.json b/package.json
index e0ca1fe7d..badd23e58 100644
--- a/package.json
+++ b/package.json
@@ -47,8 +47,8 @@
"@actions/core": "^1.1.0",
"@cdktf/provider-project": "^0.5.0",
"@types/node": "^18",
- "cdktf": "^0.19.0",
- "cdktf-cli": "^0.19.0",
+ "cdktf": "^0.20.0",
+ "cdktf-cli": "^0.20.0",
"constructs": "^10.3.0",
"dot-prop": "^5.2.0",
"jsii": "~5.2.0",
@@ -56,13 +56,13 @@
"jsii-docgen": "^10.2.3",
"jsii-pacmak": "^1.93.0",
"jsii-rosetta": "~5.2.0",
- "projen": "^0.78.11",
+ "projen": "^0.78.13",
"semver": "^7.5.3",
"standard-version": "^9",
"typescript": "~5.2.0"
},
"peerDependencies": {
- "cdktf": "^0.19.0",
+ "cdktf": "^0.20.0",
"constructs": "^10.3.0"
},
"resolutions": {
diff --git a/scripts/check-for-upgrades.js b/scripts/check-for-upgrades.js
index 8b46e0bc6..45aa6a515 100644
--- a/scripts/check-for-upgrades.js
+++ b/scripts/check-for-upgrades.js
@@ -83,7 +83,7 @@ async function getCurrentProviderVersion() {
}
// SEE NOTICE AT THE TOP WHY THIS IS INLINED CURRENTLY
-// copied from https://github.com/hashicorp/terraform-cdk/blob/b23fc173715e90c0a5b8c8633d9ec7f71edf9ed4/packages/cdktf-cli/lib/dependencies/version-constraints.ts
+// copied from https://github.com/hashicorp/terraform-cdk/blob/df858ccf4ac71a168e3636f053c6743324c98332/packages/%40cdktf/cli-core/src/lib/dependencies/version-constraints.ts
// and converted to JavaScript
// constraints can be prefixed with "~>", ">", "<", "=", ">=", "<=" or "!="
@@ -122,16 +122,26 @@ function versionMatchesConstraint(version, constraint) {
case "~>": {
// allows rightmost version component to increment
+ const parts = parsed.version.split(".");
+ const minorSpecified = parts.length === 2;
+ const majorIsZero = parts[0] === "0";
+
// ~>2.0 which allows 2.1 and 2.1.1 needs special handling as
// npm semver handles "~" differently for ~2.0 than for ~2 or ~2.1.0
// So we need to use "^" (e.g. ^2.0) for this case
// see: https://github.com/npm/node-semver/issues/11
- const allowMinorAndPatchOnly = parsed.version.split(".").length === 2;
+ const allowMinorAndPatchOnly = minorSpecified;
- const range = allowMinorAndPatchOnly
+ let range = allowMinorAndPatchOnly
? `^${parsed.version}`
: `~${parsed.version}`;
+ // versions below 1.0 are treated a bit differently in NPM than in Terraform
+ // meaning that NPM's ^0.4 doesn't allow 0.55 while TF's ~>0.4 allows 0.55
+ if (majorIsZero && minorSpecified) {
+ range = `>=${parsed.version} <1.0.0`;
+ }
+
return semver.satisfies(version, range);
}
case ">=":
diff --git a/src/access-control-rule-set/README.md b/src/access-control-rule-set/README.md
index 0017aa51e..b40dc0080 100644
--- a/src/access-control-rule-set/README.md
+++ b/src/access-control-rule-set/README.md
@@ -1,3 +1,3 @@
# `databricks_access_control_rule_set`
-Refer to the Terraform Registory for docs: [`databricks_access_control_rule_set`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/access_control_rule_set).
+Refer to the Terraform Registry for docs: [`databricks_access_control_rule_set`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/access_control_rule_set).
diff --git a/src/access-control-rule-set/index.ts b/src/access-control-rule-set/index.ts
index 702773fdd..5ab39e786 100644
--- a/src/access-control-rule-set/index.ts
+++ b/src/access-control-rule-set/index.ts
@@ -52,6 +52,31 @@ export function accessControlRuleSetGrantRulesToTerraform(struct?: AccessControl
}
}
+
+export function accessControlRuleSetGrantRulesToHclTerraform(struct?: AccessControlRuleSetGrantRules | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ principals: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.principals),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ role: {
+ value: cdktf.stringToHclTerraform(struct!.role),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class AccessControlRuleSetGrantRulesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -273,4 +298,30 @@ export class AccessControlRuleSet extends cdktf.TerraformResource {
grant_rules: cdktf.listMapper(accessControlRuleSetGrantRulesToTerraform, true)(this._grantRules.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ grant_rules: {
+ value: cdktf.listMapperHcl(accessControlRuleSetGrantRulesToHclTerraform, true)(this._grantRules.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "AccessControlRuleSetGrantRulesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/artifact-allowlist/README.md b/src/artifact-allowlist/README.md
index 1da14cd3b..770065ba4 100644
--- a/src/artifact-allowlist/README.md
+++ b/src/artifact-allowlist/README.md
@@ -1,3 +1,3 @@
# `databricks_artifact_allowlist`
-Refer to the Terraform Registory for docs: [`databricks_artifact_allowlist`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/artifact_allowlist).
+Refer to the Terraform Registry for docs: [`databricks_artifact_allowlist`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/artifact_allowlist).
diff --git a/src/artifact-allowlist/index.ts b/src/artifact-allowlist/index.ts
index 4cbc8c292..76cf95609 100644
--- a/src/artifact-allowlist/index.ts
+++ b/src/artifact-allowlist/index.ts
@@ -64,6 +64,31 @@ export function artifactAllowlistArtifactMatcherToTerraform(struct?: ArtifactAll
}
}
+
+export function artifactAllowlistArtifactMatcherToHclTerraform(struct?: ArtifactAllowlistArtifactMatcher | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ artifact: {
+ value: cdktf.stringToHclTerraform(struct!.artifact),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ match_type: {
+ value: cdktf.stringToHclTerraform(struct!.matchType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ArtifactAllowlistArtifactMatcherOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -328,4 +353,48 @@ export class ArtifactAllowlist extends cdktf.TerraformResource {
artifact_matcher: cdktf.listMapper(artifactAllowlistArtifactMatcherToTerraform, true)(this._artifactMatcher.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ artifact_type: {
+ value: cdktf.stringToHclTerraform(this._artifactType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ created_at: {
+ value: cdktf.numberToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(this._createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ artifact_matcher: {
+ value: cdktf.listMapperHcl(artifactAllowlistArtifactMatcherToHclTerraform, true)(this._artifactMatcher.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "ArtifactAllowlistArtifactMatcherList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/aws-s3-mount/README.md b/src/aws-s3-mount/README.md
index 10c7c4ab7..031770f84 100644
--- a/src/aws-s3-mount/README.md
+++ b/src/aws-s3-mount/README.md
@@ -1,3 +1,3 @@
# `databricks_aws_s3_mount`
-Refer to the Terraform Registory for docs: [`databricks_aws_s3_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/aws_s3_mount).
+Refer to the Terraform Registry for docs: [`databricks_aws_s3_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/aws_s3_mount).
diff --git a/src/aws-s3-mount/index.ts b/src/aws-s3-mount/index.ts
index 47181c0a1..7952325d4 100644
--- a/src/aws-s3-mount/index.ts
+++ b/src/aws-s3-mount/index.ts
@@ -191,4 +191,42 @@ export class AwsS3Mount extends cdktf.TerraformResource {
s3_bucket_name: cdktf.stringToTerraform(this._s3BucketName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile: {
+ value: cdktf.stringToHclTerraform(this._instanceProfile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ mount_name: {
+ value: cdktf.stringToHclTerraform(this._mountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ s3_bucket_name: {
+ value: cdktf.stringToHclTerraform(this._s3BucketName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/azure-adls-gen1-mount/README.md b/src/azure-adls-gen1-mount/README.md
index 3bba6c132..c568ee69d 100644
--- a/src/azure-adls-gen1-mount/README.md
+++ b/src/azure-adls-gen1-mount/README.md
@@ -1,3 +1,3 @@
# `databricks_azure_adls_gen1_mount`
-Refer to the Terraform Registory for docs: [`databricks_azure_adls_gen1_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_adls_gen1_mount).
+Refer to the Terraform Registry for docs: [`databricks_azure_adls_gen1_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_adls_gen1_mount).
diff --git a/src/azure-adls-gen1-mount/index.ts b/src/azure-adls-gen1-mount/index.ts
index 7e8ef2d94..6be13a309 100644
--- a/src/azure-adls-gen1-mount/index.ts
+++ b/src/azure-adls-gen1-mount/index.ts
@@ -289,4 +289,72 @@ export class AzureAdlsGen1Mount extends cdktf.TerraformResource {
tenant_id: cdktf.stringToTerraform(this._tenantId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ client_id: {
+ value: cdktf.stringToHclTerraform(this._clientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_key: {
+ value: cdktf.stringToHclTerraform(this._clientSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_scope: {
+ value: cdktf.stringToHclTerraform(this._clientSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(this._directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ mount_name: {
+ value: cdktf.stringToHclTerraform(this._mountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf_prefix: {
+ value: cdktf.stringToHclTerraform(this._sparkConfPrefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_resource_name: {
+ value: cdktf.stringToHclTerraform(this._storageResourceName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tenant_id: {
+ value: cdktf.stringToHclTerraform(this._tenantId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/azure-adls-gen2-mount/README.md b/src/azure-adls-gen2-mount/README.md
index 466be2f83..a7adcfcbe 100644
--- a/src/azure-adls-gen2-mount/README.md
+++ b/src/azure-adls-gen2-mount/README.md
@@ -1,3 +1,3 @@
# `databricks_azure_adls_gen2_mount`
-Refer to the Terraform Registory for docs: [`databricks_azure_adls_gen2_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_adls_gen2_mount).
+Refer to the Terraform Registry for docs: [`databricks_azure_adls_gen2_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_adls_gen2_mount).
diff --git a/src/azure-adls-gen2-mount/index.ts b/src/azure-adls-gen2-mount/index.ts
index 74f4cdf60..0c7a21abb 100644
--- a/src/azure-adls-gen2-mount/index.ts
+++ b/src/azure-adls-gen2-mount/index.ts
@@ -305,4 +305,78 @@ export class AzureAdlsGen2Mount extends cdktf.TerraformResource {
tenant_id: cdktf.stringToTerraform(this._tenantId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ client_id: {
+ value: cdktf.stringToHclTerraform(this._clientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_key: {
+ value: cdktf.stringToHclTerraform(this._clientSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_scope: {
+ value: cdktf.stringToHclTerraform(this._clientSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ container_name: {
+ value: cdktf.stringToHclTerraform(this._containerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(this._directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ initialize_file_system: {
+ value: cdktf.booleanToHclTerraform(this._initializeFileSystem),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ mount_name: {
+ value: cdktf.stringToHclTerraform(this._mountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_account_name: {
+ value: cdktf.stringToHclTerraform(this._storageAccountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tenant_id: {
+ value: cdktf.stringToHclTerraform(this._tenantId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/azure-blob-mount/README.md b/src/azure-blob-mount/README.md
index f9fee368d..7a064e11b 100644
--- a/src/azure-blob-mount/README.md
+++ b/src/azure-blob-mount/README.md
@@ -1,3 +1,3 @@
# `databricks_azure_blob_mount`
-Refer to the Terraform Registory for docs: [`databricks_azure_blob_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_blob_mount).
+Refer to the Terraform Registry for docs: [`databricks_azure_blob_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/azure_blob_mount).
diff --git a/src/azure-blob-mount/index.ts b/src/azure-blob-mount/index.ts
index 41414e874..481204b03 100644
--- a/src/azure-blob-mount/index.ts
+++ b/src/azure-blob-mount/index.ts
@@ -267,4 +267,66 @@ export class AzureBlobMount extends cdktf.TerraformResource {
token_secret_scope: cdktf.stringToTerraform(this._tokenSecretScope),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ auth_type: {
+ value: cdktf.stringToHclTerraform(this._authType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ container_name: {
+ value: cdktf.stringToHclTerraform(this._containerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(this._directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ mount_name: {
+ value: cdktf.stringToHclTerraform(this._mountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_account_name: {
+ value: cdktf.stringToHclTerraform(this._storageAccountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ token_secret_key: {
+ value: cdktf.stringToHclTerraform(this._tokenSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ token_secret_scope: {
+ value: cdktf.stringToHclTerraform(this._tokenSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/catalog-workspace-binding/README.md b/src/catalog-workspace-binding/README.md
index ac73d2589..584afb3a6 100644
--- a/src/catalog-workspace-binding/README.md
+++ b/src/catalog-workspace-binding/README.md
@@ -1,3 +1,3 @@
# `databricks_catalog_workspace_binding`
-Refer to the Terraform Registory for docs: [`databricks_catalog_workspace_binding`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/catalog_workspace_binding).
+Refer to the Terraform Registry for docs: [`databricks_catalog_workspace_binding`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/catalog_workspace_binding).
diff --git a/src/catalog-workspace-binding/index.ts b/src/catalog-workspace-binding/index.ts
index 567a3e5d3..55dc7139a 100644
--- a/src/catalog-workspace-binding/index.ts
+++ b/src/catalog-workspace-binding/index.ts
@@ -214,4 +214,48 @@ export class CatalogWorkspaceBinding extends cdktf.TerraformResource {
workspace_id: cdktf.numberToTerraform(this._workspaceId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ binding_type: {
+ value: cdktf.stringToHclTerraform(this._bindingType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ securable_name: {
+ value: cdktf.stringToHclTerraform(this._securableName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ securable_type: {
+ value: cdktf.stringToHclTerraform(this._securableType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_id: {
+ value: cdktf.numberToHclTerraform(this._workspaceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/catalog/README.md b/src/catalog/README.md
index 35d9b1de2..7f7b3e3be 100644
--- a/src/catalog/README.md
+++ b/src/catalog/README.md
@@ -1,3 +1,3 @@
# `databricks_catalog`
-Refer to the Terraform Registory for docs: [`databricks_catalog`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/catalog).
+Refer to the Terraform Registry for docs: [`databricks_catalog`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/catalog).
diff --git a/src/catalog/index.ts b/src/catalog/index.ts
index 1fc27e0ca..154aa82b1 100644
--- a/src/catalog/index.ts
+++ b/src/catalog/index.ts
@@ -365,4 +365,90 @@ export class Catalog extends cdktf.TerraformResource {
storage_root: cdktf.stringToTerraform(this._storageRoot),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ connection_name: {
+ value: cdktf.stringToHclTerraform(this._connectionName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ isolation_mode: {
+ value: cdktf.stringToHclTerraform(this._isolationMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ options: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._options),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ properties: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._properties),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ provider_name: {
+ value: cdktf.stringToHclTerraform(this._providerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ share_name: {
+ value: cdktf.stringToHclTerraform(this._shareName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root: {
+ value: cdktf.stringToHclTerraform(this._storageRoot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/cluster-policy/README.md b/src/cluster-policy/README.md
index 96e43459b..6151b12e4 100644
--- a/src/cluster-policy/README.md
+++ b/src/cluster-policy/README.md
@@ -1,3 +1,3 @@
# `databricks_cluster_policy`
-Refer to the Terraform Registory for docs: [`databricks_cluster_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/cluster_policy).
+Refer to the Terraform Registry for docs: [`databricks_cluster_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/cluster_policy).
diff --git a/src/cluster-policy/index.ts b/src/cluster-policy/index.ts
index 67eae4baf..258310aaa 100644
--- a/src/cluster-policy/index.ts
+++ b/src/cluster-policy/index.ts
@@ -72,6 +72,31 @@ export function clusterPolicyLibrariesCranToTerraform(struct?: ClusterPolicyLibr
}
}
+
+export function clusterPolicyLibrariesCranToHclTerraform(struct?: ClusterPolicyLibrariesCranOutputReference | ClusterPolicyLibrariesCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterPolicyLibrariesCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -166,6 +191,37 @@ export function clusterPolicyLibrariesMavenToTerraform(struct?: ClusterPolicyLib
}
}
+
+export function clusterPolicyLibrariesMavenToHclTerraform(struct?: ClusterPolicyLibrariesMavenOutputReference | ClusterPolicyLibrariesMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterPolicyLibrariesMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -277,6 +333,31 @@ export function clusterPolicyLibrariesPypiToTerraform(struct?: ClusterPolicyLibr
}
}
+
+export function clusterPolicyLibrariesPypiToHclTerraform(struct?: ClusterPolicyLibrariesPypiOutputReference | ClusterPolicyLibrariesPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterPolicyLibrariesPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -392,6 +473,55 @@ export function clusterPolicyLibrariesToTerraform(struct?: ClusterPolicyLibrarie
}
}
+
+export function clusterPolicyLibrariesToHclTerraform(struct?: ClusterPolicyLibraries | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: clusterPolicyLibrariesCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterPolicyLibrariesCranList",
+ },
+ maven: {
+ value: clusterPolicyLibrariesMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterPolicyLibrariesMavenList",
+ },
+ pypi: {
+ value: clusterPolicyLibrariesPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterPolicyLibrariesPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterPolicyLibrariesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -794,4 +924,60 @@ export class ClusterPolicy extends cdktf.TerraformResource {
libraries: cdktf.listMapper(clusterPolicyLibrariesToTerraform, true)(this._libraries.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ definition: {
+ value: cdktf.stringToHclTerraform(this._definition),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_clusters_per_user: {
+ value: cdktf.numberToHclTerraform(this._maxClustersPerUser),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ policy_family_definition_overrides: {
+ value: cdktf.stringToHclTerraform(this._policyFamilyDefinitionOverrides),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ policy_family_id: {
+ value: cdktf.stringToHclTerraform(this._policyFamilyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ libraries: {
+ value: cdktf.listMapperHcl(clusterPolicyLibrariesToHclTerraform, true)(this._libraries.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterPolicyLibrariesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/cluster/README.md b/src/cluster/README.md
index 64a5c7396..d2f63008a 100644
--- a/src/cluster/README.md
+++ b/src/cluster/README.md
@@ -1,3 +1,3 @@
# `databricks_cluster`
-Refer to the Terraform Registory for docs: [`databricks_cluster`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/cluster).
+Refer to the Terraform Registry for docs: [`databricks_cluster`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/cluster).
diff --git a/src/cluster/index.ts b/src/cluster/index.ts
index 97960a125..369bc4a57 100644
--- a/src/cluster/index.ts
+++ b/src/cluster/index.ts
@@ -196,6 +196,31 @@ export function clusterAutoscaleToTerraform(struct?: ClusterAutoscaleOutputRefer
}
}
+
+export function clusterAutoscaleToHclTerraform(struct?: ClusterAutoscaleOutputReference | ClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -318,6 +343,67 @@ export function clusterAwsAttributesToTerraform(struct?: ClusterAwsAttributesOut
}
}
+
+export function clusterAwsAttributesToHclTerraform(struct?: ClusterAwsAttributesOutputReference | ClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -547,6 +633,37 @@ export function clusterAzureAttributesToTerraform(struct?: ClusterAzureAttribute
}
}
+
+export function clusterAzureAttributesToHclTerraform(struct?: ClusterAzureAttributesOutputReference | ClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -656,6 +773,25 @@ export function clusterClusterLogConfDbfsToTerraform(struct?: ClusterClusterLogC
}
}
+
+export function clusterClusterLogConfDbfsToHclTerraform(struct?: ClusterClusterLogConfDbfsOutputReference | ClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -748,6 +884,61 @@ export function clusterClusterLogConfS3ToTerraform(struct?: ClusterClusterLogCon
}
}
+
+export function clusterClusterLogConfS3ToHclTerraform(struct?: ClusterClusterLogConfS3OutputReference | ClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -951,6 +1142,31 @@ export function clusterClusterLogConfToTerraform(struct?: ClusterClusterLogConfO
}
}
+
+export function clusterClusterLogConfToHclTerraform(struct?: ClusterClusterLogConfOutputReference | ClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: clusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: clusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1043,6 +1259,31 @@ export function clusterClusterMountInfoNetworkFilesystemInfoToTerraform(struct?:
}
}
+
+export function clusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: ClusterClusterMountInfoNetworkFilesystemInfoOutputReference | ClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1139,6 +1380,37 @@ export function clusterClusterMountInfoToTerraform(struct?: ClusterClusterMountI
}
}
+
+export function clusterClusterMountInfoToHclTerraform(struct?: ClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: clusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1279,6 +1551,31 @@ export function clusterDockerImageBasicAuthToTerraform(struct?: ClusterDockerIma
}
}
+
+export function clusterDockerImageBasicAuthToHclTerraform(struct?: ClusterDockerImageBasicAuthOutputReference | ClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1367,6 +1664,31 @@ export function clusterDockerImageToTerraform(struct?: ClusterDockerImageOutputR
}
}
+
+export function clusterDockerImageToHclTerraform(struct?: ClusterDockerImageOutputReference | ClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: clusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1476,6 +1798,55 @@ export function clusterGcpAttributesToTerraform(struct?: ClusterGcpAttributesOut
}
}
+
+export function clusterGcpAttributesToHclTerraform(struct?: ClusterGcpAttributesOutputReference | ClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1651,6 +2022,25 @@ export function clusterInitScriptsAbfssToTerraform(struct?: ClusterInitScriptsAb
}
}
+
+export function clusterInitScriptsAbfssToHclTerraform(struct?: ClusterInitScriptsAbfssOutputReference | ClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1716,6 +2106,25 @@ export function clusterInitScriptsDbfsToTerraform(struct?: ClusterInitScriptsDbf
}
}
+
+export function clusterInitScriptsDbfsToHclTerraform(struct?: ClusterInitScriptsDbfsOutputReference | ClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1778,6 +2187,25 @@ export function clusterInitScriptsFileToTerraform(struct?: ClusterInitScriptsFil
}
}
+
+export function clusterInitScriptsFileToHclTerraform(struct?: ClusterInitScriptsFileOutputReference | ClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1843,6 +2271,25 @@ export function clusterInitScriptsGcsToTerraform(struct?: ClusterInitScriptsGcsO
}
}
+
+export function clusterInitScriptsGcsToHclTerraform(struct?: ClusterInitScriptsGcsOutputReference | ClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1938,6 +2385,61 @@ export function clusterInitScriptsS3ToTerraform(struct?: ClusterInitScriptsS3Out
}
}
+
+export function clusterInitScriptsS3ToHclTerraform(struct?: ClusterInitScriptsS3OutputReference | ClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2132,6 +2634,25 @@ export function clusterInitScriptsVolumesToTerraform(struct?: ClusterInitScripts
}
}
+
+export function clusterInitScriptsVolumesToHclTerraform(struct?: ClusterInitScriptsVolumesOutputReference | ClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2197,6 +2718,25 @@ export function clusterInitScriptsWorkspaceToTerraform(struct?: ClusterInitScrip
}
}
+
+export function clusterInitScriptsWorkspaceToHclTerraform(struct?: ClusterInitScriptsWorkspaceOutputReference | ClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2306,6 +2846,61 @@ export function clusterInitScriptsToTerraform(struct?: ClusterInitScripts | cdkt
}
}
+
+export function clusterInitScriptsToHclTerraform(struct?: ClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: clusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: clusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: clusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: clusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: clusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: clusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: clusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2540,6 +3135,31 @@ export function clusterLibraryCranToTerraform(struct?: ClusterLibraryCranOutputR
}
}
+
+export function clusterLibraryCranToHclTerraform(struct?: ClusterLibraryCranOutputReference | ClusterLibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterLibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2634,6 +3254,37 @@ export function clusterLibraryMavenToTerraform(struct?: ClusterLibraryMavenOutpu
}
}
+
+export function clusterLibraryMavenToHclTerraform(struct?: ClusterLibraryMavenOutputReference | ClusterLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2745,6 +3396,31 @@ export function clusterLibraryPypiToTerraform(struct?: ClusterLibraryPypiOutputR
}
}
+
+export function clusterLibraryPypiToHclTerraform(struct?: ClusterLibraryPypiOutputReference | ClusterLibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterLibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2860,6 +3536,55 @@ export function clusterLibraryToTerraform(struct?: ClusterLibrary | cdktf.IResol
}
}
+
+export function clusterLibraryToHclTerraform(struct?: ClusterLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: clusterLibraryCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterLibraryCranList",
+ },
+ maven: {
+ value: clusterLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterLibraryMavenList",
+ },
+ pypi: {
+ value: clusterLibraryPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterLibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3077,6 +3802,37 @@ export function clusterTimeoutsToTerraform(struct?: ClusterTimeouts | cdktf.IRes
}
}
+
+export function clusterTimeoutsToHclTerraform(struct?: ClusterTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ create: {
+ value: cdktf.stringToHclTerraform(struct!.create),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delete: {
+ value: cdktf.stringToHclTerraform(struct!.delete),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ update: {
+ value: cdktf.stringToHclTerraform(struct!.update),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3201,6 +3957,31 @@ export function clusterWorkloadTypeClientsToTerraform(struct?: ClusterWorkloadTy
}
}
+
+export function clusterWorkloadTypeClientsToHclTerraform(struct?: ClusterWorkloadTypeClientsOutputReference | ClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3290,6 +4071,25 @@ export function clusterWorkloadTypeToTerraform(struct?: ClusterWorkloadTypeOutpu
}
}
+
+export function clusterWorkloadTypeToHclTerraform(struct?: ClusterWorkloadTypeOutputReference | ClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: clusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4026,4 +4826,216 @@ export class Cluster extends cdktf.TerraformResource {
workload_type: clusterWorkloadTypeToTerraform(this._workloadType.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(this._applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(this._autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(this._clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(this._dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(this._driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(this._driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(this._enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(this._enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(this._idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(this._instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_pinned: {
+ value: cdktf.booleanToHclTerraform(this._isPinned),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(this._nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(this._numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(this._policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(this._runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(this._singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(this._sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: clusterAutoscaleToHclTerraform(this._autoscale.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: clusterAwsAttributesToHclTerraform(this._awsAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: clusterAzureAttributesToHclTerraform(this._azureAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: clusterClusterLogConfToHclTerraform(this._clusterLogConf.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(clusterClusterMountInfoToHclTerraform, true)(this._clusterMountInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: clusterDockerImageToHclTerraform(this._dockerImage.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: clusterGcpAttributesToHclTerraform(this._gcpAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(clusterInitScriptsToHclTerraform, true)(this._initScripts.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterInitScriptsList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(clusterLibraryToHclTerraform, true)(this._library.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "ClusterLibraryList",
+ },
+ timeouts: {
+ value: clusterTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "ClusterTimeouts",
+ },
+ workload_type: {
+ value: clusterWorkloadTypeToHclTerraform(this._workloadType.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/connection/README.md b/src/connection/README.md
index 2d6114796..b85de13ca 100644
--- a/src/connection/README.md
+++ b/src/connection/README.md
@@ -1,3 +1,3 @@
# `databricks_connection`
-Refer to the Terraform Registory for docs: [`databricks_connection`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/connection).
+Refer to the Terraform Registry for docs: [`databricks_connection`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/connection).
diff --git a/src/connection/index.ts b/src/connection/index.ts
index 336c20a79..87ab465e0 100644
--- a/src/connection/index.ts
+++ b/src/connection/index.ts
@@ -271,4 +271,66 @@ export class Connection extends cdktf.TerraformResource {
read_only: cdktf.booleanToTerraform(this._readOnly),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ connection_type: {
+ value: cdktf.stringToHclTerraform(this._connectionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ options: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._options),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ properties: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._properties),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ read_only: {
+ value: cdktf.booleanToHclTerraform(this._readOnly),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-aws-assume-role-policy/README.md b/src/data-databricks-aws-assume-role-policy/README.md
index 04d226ada..b609ca142 100644
--- a/src/data-databricks-aws-assume-role-policy/README.md
+++ b/src/data-databricks-aws-assume-role-policy/README.md
@@ -1,3 +1,3 @@
# `data_databricks_aws_assume_role_policy`
-Refer to the Terraform Registory for docs: [`data_databricks_aws_assume_role_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_assume_role_policy).
+Refer to the Terraform Registry for docs: [`data_databricks_aws_assume_role_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_assume_role_policy).
diff --git a/src/data-databricks-aws-assume-role-policy/index.ts b/src/data-databricks-aws-assume-role-policy/index.ts
index 844ce3b22..25aad62fe 100644
--- a/src/data-databricks-aws-assume-role-policy/index.ts
+++ b/src/data-databricks-aws-assume-role-policy/index.ts
@@ -174,4 +174,36 @@ export class DataDatabricksAwsAssumeRolePolicy extends cdktf.TerraformDataSource
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ databricks_account_id: {
+ value: cdktf.stringToHclTerraform(this._databricksAccountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ for_log_delivery: {
+ value: cdktf.booleanToHclTerraform(this._forLogDelivery),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-aws-bucket-policy/README.md b/src/data-databricks-aws-bucket-policy/README.md
index 96e128de0..3a1787fcf 100644
--- a/src/data-databricks-aws-bucket-policy/README.md
+++ b/src/data-databricks-aws-bucket-policy/README.md
@@ -1,3 +1,3 @@
# `data_databricks_aws_bucket_policy`
-Refer to the Terraform Registory for docs: [`data_databricks_aws_bucket_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_bucket_policy).
+Refer to the Terraform Registry for docs: [`data_databricks_aws_bucket_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_bucket_policy).
diff --git a/src/data-databricks-aws-bucket-policy/index.ts b/src/data-databricks-aws-bucket-policy/index.ts
index e3d78fe37..5627dc4a8 100644
--- a/src/data-databricks-aws-bucket-policy/index.ts
+++ b/src/data-databricks-aws-bucket-policy/index.ts
@@ -194,4 +194,42 @@ export class DataDatabricksAwsBucketPolicy extends cdktf.TerraformDataSource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ bucket: {
+ value: cdktf.stringToHclTerraform(this._bucket),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ databricks_account_id: {
+ value: cdktf.stringToHclTerraform(this._databricksAccountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ databricks_e2_account_id: {
+ value: cdktf.stringToHclTerraform(this._databricksE2AccountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ full_access_role: {
+ value: cdktf.stringToHclTerraform(this._fullAccessRole),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-aws-crossaccount-policy/README.md b/src/data-databricks-aws-crossaccount-policy/README.md
index bd9b18f7c..d1e20fd76 100644
--- a/src/data-databricks-aws-crossaccount-policy/README.md
+++ b/src/data-databricks-aws-crossaccount-policy/README.md
@@ -1,3 +1,3 @@
# `data_databricks_aws_crossaccount_policy`
-Refer to the Terraform Registory for docs: [`data_databricks_aws_crossaccount_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_crossaccount_policy).
+Refer to the Terraform Registry for docs: [`data_databricks_aws_crossaccount_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/aws_crossaccount_policy).
diff --git a/src/data-databricks-aws-crossaccount-policy/index.ts b/src/data-databricks-aws-crossaccount-policy/index.ts
index f2c9c063e..8f8ca5784 100644
--- a/src/data-databricks-aws-crossaccount-policy/index.ts
+++ b/src/data-databricks-aws-crossaccount-policy/index.ts
@@ -131,4 +131,24 @@ export class DataDatabricksAwsCrossaccountPolicy extends cdktf.TerraformDataSour
pass_roles: cdktf.listMapper(cdktf.stringToTerraform, false)(this._passRoles),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pass_roles: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._passRoles),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-catalogs/README.md b/src/data-databricks-catalogs/README.md
index 650d70c46..eeb0ad099 100644
--- a/src/data-databricks-catalogs/README.md
+++ b/src/data-databricks-catalogs/README.md
@@ -1,3 +1,3 @@
# `data_databricks_catalogs`
-Refer to the Terraform Registory for docs: [`data_databricks_catalogs`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/catalogs).
+Refer to the Terraform Registry for docs: [`data_databricks_catalogs`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/catalogs).
diff --git a/src/data-databricks-catalogs/index.ts b/src/data-databricks-catalogs/index.ts
index e2e534d8d..abcb9ada1 100644
--- a/src/data-databricks-catalogs/index.ts
+++ b/src/data-databricks-catalogs/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksCatalogs extends cdktf.TerraformDataSource {
ids: cdktf.listMapper(cdktf.stringToTerraform, false)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-cluster-policy/README.md b/src/data-databricks-cluster-policy/README.md
index 41bfe251f..6807f95a8 100644
--- a/src/data-databricks-cluster-policy/README.md
+++ b/src/data-databricks-cluster-policy/README.md
@@ -1,3 +1,3 @@
# `data_databricks_cluster_policy`
-Refer to the Terraform Registory for docs: [`data_databricks_cluster_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/cluster_policy).
+Refer to the Terraform Registry for docs: [`data_databricks_cluster_policy`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/cluster_policy).
diff --git a/src/data-databricks-cluster-policy/index.ts b/src/data-databricks-cluster-policy/index.ts
index bc9f5b175..44003cc0b 100644
--- a/src/data-databricks-cluster-policy/index.ts
+++ b/src/data-databricks-cluster-policy/index.ts
@@ -258,4 +258,60 @@ export class DataDatabricksClusterPolicy extends cdktf.TerraformDataSource {
policy_family_id: cdktf.stringToTerraform(this._policyFamilyId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ definition: {
+ value: cdktf.stringToHclTerraform(this._definition),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_default: {
+ value: cdktf.booleanToHclTerraform(this._isDefault),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ max_clusters_per_user: {
+ value: cdktf.numberToHclTerraform(this._maxClustersPerUser),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ policy_family_definition_overrides: {
+ value: cdktf.stringToHclTerraform(this._policyFamilyDefinitionOverrides),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ policy_family_id: {
+ value: cdktf.stringToHclTerraform(this._policyFamilyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-cluster/README.md b/src/data-databricks-cluster/README.md
index acf155da2..cf7159044 100644
--- a/src/data-databricks-cluster/README.md
+++ b/src/data-databricks-cluster/README.md
@@ -1,3 +1,3 @@
# `data_databricks_cluster`
-Refer to the Terraform Registory for docs: [`data_databricks_cluster`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/cluster).
+Refer to the Terraform Registry for docs: [`data_databricks_cluster`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/cluster).
diff --git a/src/data-databricks-cluster/index.ts b/src/data-databricks-cluster/index.ts
index 4e5d5b1cb..252b37085 100644
--- a/src/data-databricks-cluster/index.ts
+++ b/src/data-databricks-cluster/index.ts
@@ -56,6 +56,31 @@ export function dataDatabricksClusterClusterInfoAutoscaleToTerraform(struct?: Da
}
}
+
+export function dataDatabricksClusterClusterInfoAutoscaleToHclTerraform(struct?: DataDatabricksClusterClusterInfoAutoscaleOutputReference | DataDatabricksClusterClusterInfoAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -178,6 +203,67 @@ export function dataDatabricksClusterClusterInfoAwsAttributesToTerraform(struct?
}
}
+
+export function dataDatabricksClusterClusterInfoAwsAttributesToHclTerraform(struct?: DataDatabricksClusterClusterInfoAwsAttributesOutputReference | DataDatabricksClusterClusterInfoAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -407,6 +493,37 @@ export function dataDatabricksClusterClusterInfoAzureAttributesToTerraform(struc
}
}
+
+export function dataDatabricksClusterClusterInfoAzureAttributesToHclTerraform(struct?: DataDatabricksClusterClusterInfoAzureAttributesOutputReference | DataDatabricksClusterClusterInfoAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -516,6 +633,25 @@ export function dataDatabricksClusterClusterInfoClusterLogConfDbfsToTerraform(st
}
}
+
+export function dataDatabricksClusterClusterInfoClusterLogConfDbfsToHclTerraform(struct?: DataDatabricksClusterClusterInfoClusterLogConfDbfsOutputReference | DataDatabricksClusterClusterInfoClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -608,6 +744,61 @@ export function dataDatabricksClusterClusterInfoClusterLogConfS3ToTerraform(stru
}
}
+
+export function dataDatabricksClusterClusterInfoClusterLogConfS3ToHclTerraform(struct?: DataDatabricksClusterClusterInfoClusterLogConfS3OutputReference | DataDatabricksClusterClusterInfoClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -811,6 +1002,31 @@ export function dataDatabricksClusterClusterInfoClusterLogConfToTerraform(struct
}
}
+
+export function dataDatabricksClusterClusterInfoClusterLogConfToHclTerraform(struct?: DataDatabricksClusterClusterInfoClusterLogConfOutputReference | DataDatabricksClusterClusterInfoClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: dataDatabricksClusterClusterInfoClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoClusterLogConfDbfsList",
+ },
+ s3: {
+ value: dataDatabricksClusterClusterInfoClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -903,6 +1119,31 @@ export function dataDatabricksClusterClusterInfoClusterLogStatusToTerraform(stru
}
}
+
+export function dataDatabricksClusterClusterInfoClusterLogStatusToHclTerraform(struct?: DataDatabricksClusterClusterInfoClusterLogStatusOutputReference | DataDatabricksClusterClusterInfoClusterLogStatus): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ last_attempted: {
+ value: cdktf.numberToHclTerraform(struct!.lastAttempted),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ last_exception: {
+ value: cdktf.stringToHclTerraform(struct!.lastException),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoClusterLogStatusOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -995,6 +1236,31 @@ export function dataDatabricksClusterClusterInfoDockerImageBasicAuthToTerraform(
}
}
+
+export function dataDatabricksClusterClusterInfoDockerImageBasicAuthToHclTerraform(struct?: DataDatabricksClusterClusterInfoDockerImageBasicAuthOutputReference | DataDatabricksClusterClusterInfoDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1083,6 +1349,31 @@ export function dataDatabricksClusterClusterInfoDockerImageToTerraform(struct?:
}
}
+
+export function dataDatabricksClusterClusterInfoDockerImageToHclTerraform(struct?: DataDatabricksClusterClusterInfoDockerImageOutputReference | DataDatabricksClusterClusterInfoDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: dataDatabricksClusterClusterInfoDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1167,6 +1458,25 @@ export function dataDatabricksClusterClusterInfoDriverNodeAwsAttributesToTerrafo
}
}
+
+export function dataDatabricksClusterClusterInfoDriverNodeAwsAttributesToHclTerraform(struct?: DataDatabricksClusterClusterInfoDriverNodeAwsAttributesOutputReference | DataDatabricksClusterClusterInfoDriverNodeAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ is_spot: {
+ value: cdktf.booleanToHclTerraform(struct!.isSpot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoDriverNodeAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1264,6 +1574,61 @@ export function dataDatabricksClusterClusterInfoDriverToTerraform(struct?: DataD
}
}
+
+export function dataDatabricksClusterClusterInfoDriverToHclTerraform(struct?: DataDatabricksClusterClusterInfoDriverOutputReference | DataDatabricksClusterClusterInfoDriver): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ host_private_ip: {
+ value: cdktf.stringToHclTerraform(struct!.hostPrivateIp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_id: {
+ value: cdktf.stringToHclTerraform(struct!.instanceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_ip: {
+ value: cdktf.stringToHclTerraform(struct!.privateIp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ public_dns: {
+ value: cdktf.stringToHclTerraform(struct!.publicDns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start_timestamp: {
+ value: cdktf.numberToHclTerraform(struct!.startTimestamp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ node_aws_attributes: {
+ value: dataDatabricksClusterClusterInfoDriverNodeAwsAttributesToHclTerraform(struct!.nodeAwsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoDriverNodeAwsAttributesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoDriverOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1461,6 +1826,25 @@ export function dataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesToTerr
}
}
+
+export function dataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesToHclTerraform(struct?: DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesOutputReference | DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ is_spot: {
+ value: cdktf.booleanToHclTerraform(struct!.isSpot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1558,6 +1942,61 @@ export function dataDatabricksClusterClusterInfoExecutorsToTerraform(struct?: Da
}
}
+
+export function dataDatabricksClusterClusterInfoExecutorsToHclTerraform(struct?: DataDatabricksClusterClusterInfoExecutors | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ host_private_ip: {
+ value: cdktf.stringToHclTerraform(struct!.hostPrivateIp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_id: {
+ value: cdktf.stringToHclTerraform(struct!.instanceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_ip: {
+ value: cdktf.stringToHclTerraform(struct!.privateIp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ public_dns: {
+ value: cdktf.stringToHclTerraform(struct!.publicDns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start_timestamp: {
+ value: cdktf.numberToHclTerraform(struct!.startTimestamp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ node_aws_attributes: {
+ value: dataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesToHclTerraform(struct!.nodeAwsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoExecutorsNodeAwsAttributesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoExecutorsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1812,6 +2251,55 @@ export function dataDatabricksClusterClusterInfoGcpAttributesToTerraform(struct?
}
}
+
+export function dataDatabricksClusterClusterInfoGcpAttributesToHclTerraform(struct?: DataDatabricksClusterClusterInfoGcpAttributesOutputReference | DataDatabricksClusterClusterInfoGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1987,6 +2475,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsAbfssToTerraform(stru
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsAbfssToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsAbfssOutputReference | DataDatabricksClusterClusterInfoInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2052,6 +2559,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsDbfsToTerraform(struc
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsDbfsToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsDbfsOutputReference | DataDatabricksClusterClusterInfoInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2114,6 +2640,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsFileToTerraform(struc
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsFileToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsFileOutputReference | DataDatabricksClusterClusterInfoInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2179,6 +2724,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsGcsToTerraform(struct
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsGcsToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsGcsOutputReference | DataDatabricksClusterClusterInfoInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2274,6 +2838,61 @@ export function dataDatabricksClusterClusterInfoInitScriptsS3ToTerraform(struct?
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsS3ToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsS3OutputReference | DataDatabricksClusterClusterInfoInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2468,6 +3087,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsVolumesToTerraform(st
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsVolumesToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsVolumesOutputReference | DataDatabricksClusterClusterInfoInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2533,6 +3171,25 @@ export function dataDatabricksClusterClusterInfoInitScriptsWorkspaceToTerraform(
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsWorkspaceToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScriptsWorkspaceOutputReference | DataDatabricksClusterClusterInfoInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2642,6 +3299,61 @@ export function dataDatabricksClusterClusterInfoInitScriptsToTerraform(struct?:
}
}
+
+export function dataDatabricksClusterClusterInfoInitScriptsToHclTerraform(struct?: DataDatabricksClusterClusterInfoInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: dataDatabricksClusterClusterInfoInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: dataDatabricksClusterClusterInfoInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsDbfsList",
+ },
+ file: {
+ value: dataDatabricksClusterClusterInfoInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsFileList",
+ },
+ gcs: {
+ value: dataDatabricksClusterClusterInfoInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsGcsList",
+ },
+ s3: {
+ value: dataDatabricksClusterClusterInfoInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsS3List",
+ },
+ volumes: {
+ value: dataDatabricksClusterClusterInfoInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsVolumesList",
+ },
+ workspace: {
+ value: dataDatabricksClusterClusterInfoInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2881,6 +3593,37 @@ export function dataDatabricksClusterClusterInfoTerminationReasonToTerraform(str
}
}
+
+export function dataDatabricksClusterClusterInfoTerminationReasonToHclTerraform(struct?: DataDatabricksClusterClusterInfoTerminationReasonOutputReference | DataDatabricksClusterClusterInfoTerminationReason): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ code: {
+ value: cdktf.stringToHclTerraform(struct!.code),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.parameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ type: {
+ value: cdktf.stringToHclTerraform(struct!.type),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoTerminationReasonOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3222,6 +3965,277 @@ export function dataDatabricksClusterClusterInfoToTerraform(struct?: DataDatabri
}
}
+
+export function dataDatabricksClusterClusterInfoToHclTerraform(struct?: DataDatabricksClusterClusterInfoOutputReference | DataDatabricksClusterClusterInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_cores: {
+ value: cdktf.numberToHclTerraform(struct!.clusterCores),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_memory_mb: {
+ value: cdktf.numberToHclTerraform(struct!.clusterMemoryMb),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_source: {
+ value: cdktf.stringToHclTerraform(struct!.clusterSource),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creator_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.creatorUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ default_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.defaultTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jdbc_port: {
+ value: cdktf.numberToHclTerraform(struct!.jdbcPort),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ last_activity_time: {
+ value: cdktf.numberToHclTerraform(struct!.lastActivityTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ last_state_loss_time: {
+ value: cdktf.numberToHclTerraform(struct!.lastStateLossTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_context_id: {
+ value: cdktf.numberToHclTerraform(struct!.sparkContextId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ start_time: {
+ value: cdktf.numberToHclTerraform(struct!.startTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(struct!.state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ state_message: {
+ value: cdktf.stringToHclTerraform(struct!.stateMessage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ terminate_time: {
+ value: cdktf.numberToHclTerraform(struct!.terminateTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ autoscale: {
+ value: dataDatabricksClusterClusterInfoAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoAutoscaleList",
+ },
+ aws_attributes: {
+ value: dataDatabricksClusterClusterInfoAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoAwsAttributesList",
+ },
+ azure_attributes: {
+ value: dataDatabricksClusterClusterInfoAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: dataDatabricksClusterClusterInfoClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoClusterLogConfList",
+ },
+ cluster_log_status: {
+ value: dataDatabricksClusterClusterInfoClusterLogStatusToHclTerraform(struct!.clusterLogStatus),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoClusterLogStatusList",
+ },
+ docker_image: {
+ value: dataDatabricksClusterClusterInfoDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoDockerImageList",
+ },
+ driver: {
+ value: dataDatabricksClusterClusterInfoDriverToHclTerraform(struct!.driver),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoDriverList",
+ },
+ executors: {
+ value: cdktf.listMapperHcl(dataDatabricksClusterClusterInfoExecutorsToHclTerraform, true)(struct!.executors),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoExecutorsList",
+ },
+ gcp_attributes: {
+ value: dataDatabricksClusterClusterInfoGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(dataDatabricksClusterClusterInfoInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoInitScriptsList",
+ },
+ termination_reason: {
+ value: dataDatabricksClusterClusterInfoTerminationReasonToHclTerraform(struct!.terminationReason),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoTerminationReasonList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksClusterClusterInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4323,4 +5337,36 @@ export class DataDatabricksCluster extends cdktf.TerraformDataSource {
cluster_info: dataDatabricksClusterClusterInfoToTerraform(this._clusterInfo.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(this._clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_info: {
+ value: dataDatabricksClusterClusterInfoToHclTerraform(this._clusterInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksClusterClusterInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-clusters/README.md b/src/data-databricks-clusters/README.md
index 448b989d7..8bb7f0395 100644
--- a/src/data-databricks-clusters/README.md
+++ b/src/data-databricks-clusters/README.md
@@ -1,3 +1,3 @@
# `data_databricks_clusters`
-Refer to the Terraform Registory for docs: [`data_databricks_clusters`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/clusters).
+Refer to the Terraform Registry for docs: [`data_databricks_clusters`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/clusters).
diff --git a/src/data-databricks-clusters/index.ts b/src/data-databricks-clusters/index.ts
index 6b243dbee..4977342b9 100644
--- a/src/data-databricks-clusters/index.ts
+++ b/src/data-databricks-clusters/index.ts
@@ -131,4 +131,24 @@ export class DataDatabricksClusters extends cdktf.TerraformDataSource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cluster_name_contains: {
+ value: cdktf.stringToHclTerraform(this._clusterNameContains),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-current-config/README.md b/src/data-databricks-current-config/README.md
index 06c28def9..6ede7489e 100644
--- a/src/data-databricks-current-config/README.md
+++ b/src/data-databricks-current-config/README.md
@@ -1,3 +1,3 @@
# `data_databricks_current_config`
-Refer to the Terraform Registory for docs: [`data_databricks_current_config`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/current_config).
+Refer to the Terraform Registry for docs: [`data_databricks_current_config`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/current_config).
diff --git a/src/data-databricks-current-config/index.ts b/src/data-databricks-current-config/index.ts
index 36bc5f107..f7880c63c 100644
--- a/src/data-databricks-current-config/index.ts
+++ b/src/data-databricks-current-config/index.ts
@@ -214,4 +214,48 @@ export class DataDatabricksCurrentConfig extends cdktf.TerraformDataSource {
is_account: cdktf.booleanToTerraform(this._isAccount),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ auth_type: {
+ value: cdktf.stringToHclTerraform(this._authType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cloud_type: {
+ value: cdktf.stringToHclTerraform(this._cloudType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ host: {
+ value: cdktf.stringToHclTerraform(this._host),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_account: {
+ value: cdktf.booleanToHclTerraform(this._isAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-current-user/README.md b/src/data-databricks-current-user/README.md
index b42fa40cf..97f81a964 100644
--- a/src/data-databricks-current-user/README.md
+++ b/src/data-databricks-current-user/README.md
@@ -1,3 +1,3 @@
# `data_databricks_current_user`
-Refer to the Terraform Registory for docs: [`data_databricks_current_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/current_user).
+Refer to the Terraform Registry for docs: [`data_databricks_current_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/current_user).
diff --git a/src/data-databricks-current-user/index.ts b/src/data-databricks-current-user/index.ts
index 0cf1db025..84f63232e 100644
--- a/src/data-databricks-current-user/index.ts
+++ b/src/data-databricks-current-user/index.ts
@@ -139,4 +139,18 @@ export class DataDatabricksCurrentUser extends cdktf.TerraformDataSource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-dbfs-file-paths/README.md b/src/data-databricks-dbfs-file-paths/README.md
index 108233d5a..7a1f8c1eb 100644
--- a/src/data-databricks-dbfs-file-paths/README.md
+++ b/src/data-databricks-dbfs-file-paths/README.md
@@ -1,3 +1,3 @@
# `data_databricks_dbfs_file_paths`
-Refer to the Terraform Registory for docs: [`data_databricks_dbfs_file_paths`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/dbfs_file_paths).
+Refer to the Terraform Registry for docs: [`data_databricks_dbfs_file_paths`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/dbfs_file_paths).
diff --git a/src/data-databricks-dbfs-file-paths/index.ts b/src/data-databricks-dbfs-file-paths/index.ts
index 14e24ac6a..06ca5b452 100644
--- a/src/data-databricks-dbfs-file-paths/index.ts
+++ b/src/data-databricks-dbfs-file-paths/index.ts
@@ -40,6 +40,17 @@ export function dataDatabricksDbfsFilePathsPathListStructToTerraform(struct?: Da
}
}
+
+export function dataDatabricksDbfsFilePathsPathListStructToHclTerraform(struct?: DataDatabricksDbfsFilePathsPathListStruct): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ };
+ return attrs;
+}
+
export class DataDatabricksDbfsFilePathsPathListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -217,4 +228,30 @@ export class DataDatabricksDbfsFilePaths extends cdktf.TerraformDataSource {
recursive: cdktf.booleanToTerraform(this._recursive),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ recursive: {
+ value: cdktf.booleanToHclTerraform(this._recursive),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-dbfs-file/README.md b/src/data-databricks-dbfs-file/README.md
index 3f7ae1263..127907ef7 100644
--- a/src/data-databricks-dbfs-file/README.md
+++ b/src/data-databricks-dbfs-file/README.md
@@ -1,3 +1,3 @@
# `data_databricks_dbfs_file`
-Refer to the Terraform Registory for docs: [`data_databricks_dbfs_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/dbfs_file).
+Refer to the Terraform Registry for docs: [`data_databricks_dbfs_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/dbfs_file).
diff --git a/src/data-databricks-dbfs-file/index.ts b/src/data-databricks-dbfs-file/index.ts
index f7ac4cded..488f1d2ae 100644
--- a/src/data-databricks-dbfs-file/index.ts
+++ b/src/data-databricks-dbfs-file/index.ts
@@ -152,4 +152,30 @@ export class DataDatabricksDbfsFile extends cdktf.TerraformDataSource {
path: cdktf.stringToTerraform(this._path),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ limit_file_size: {
+ value: cdktf.booleanToHclTerraform(this._limitFileSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-directory/README.md b/src/data-databricks-directory/README.md
index 34fcd6805..901073e57 100644
--- a/src/data-databricks-directory/README.md
+++ b/src/data-databricks-directory/README.md
@@ -1,3 +1,3 @@
# `data_databricks_directory`
-Refer to the Terraform Registory for docs: [`data_databricks_directory`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/directory).
+Refer to the Terraform Registry for docs: [`data_databricks_directory`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/directory).
diff --git a/src/data-databricks-directory/index.ts b/src/data-databricks-directory/index.ts
index e7bde9503..46a9c2f40 100644
--- a/src/data-databricks-directory/index.ts
+++ b/src/data-databricks-directory/index.ts
@@ -145,4 +145,30 @@ export class DataDatabricksDirectory extends cdktf.TerraformDataSource {
path: cdktf.stringToTerraform(this._path),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_id: {
+ value: cdktf.numberToHclTerraform(this._objectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-group/README.md b/src/data-databricks-group/README.md
index 080aef704..3b62fbc53 100644
--- a/src/data-databricks-group/README.md
+++ b/src/data-databricks-group/README.md
@@ -1,3 +1,3 @@
# `data_databricks_group`
-Refer to the Terraform Registory for docs: [`data_databricks_group`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/group).
+Refer to the Terraform Registry for docs: [`data_databricks_group`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/group).
diff --git a/src/data-databricks-group/index.ts b/src/data-databricks-group/index.ts
index a9a9cff1d..0b24e4801 100644
--- a/src/data-databricks-group/index.ts
+++ b/src/data-databricks-group/index.ts
@@ -409,4 +409,102 @@ export class DataDatabricksGroup extends cdktf.TerraformDataSource {
workspace_access: cdktf.booleanToTerraform(this._workspaceAccess),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ acl_principal_id: {
+ value: cdktf.stringToHclTerraform(this._aclPrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ allow_cluster_create: {
+ value: cdktf.booleanToHclTerraform(this._allowClusterCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_instance_pool_create: {
+ value: cdktf.booleanToHclTerraform(this._allowInstancePoolCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ child_groups: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._childGroups),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ databricks_sql_access: {
+ value: cdktf.booleanToHclTerraform(this._databricksSqlAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ display_name: {
+ value: cdktf.stringToHclTerraform(this._displayName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ groups: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._groups),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profiles: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._instanceProfiles),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ members: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._members),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ recursive: {
+ value: cdktf.booleanToHclTerraform(this._recursive),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ service_principals: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._servicePrincipals),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ users: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._users),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ workspace_access: {
+ value: cdktf.booleanToHclTerraform(this._workspaceAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-instance-pool/README.md b/src/data-databricks-instance-pool/README.md
index f8a1764f0..47673b65e 100644
--- a/src/data-databricks-instance-pool/README.md
+++ b/src/data-databricks-instance-pool/README.md
@@ -1,3 +1,3 @@
# `data_databricks_instance_pool`
-Refer to the Terraform Registory for docs: [`data_databricks_instance_pool`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/instance_pool).
+Refer to the Terraform Registry for docs: [`data_databricks_instance_pool`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/instance_pool).
diff --git a/src/data-databricks-instance-pool/index.ts b/src/data-databricks-instance-pool/index.ts
index 3558a8634..c26e30d66 100644
--- a/src/data-databricks-instance-pool/index.ts
+++ b/src/data-databricks-instance-pool/index.ts
@@ -57,6 +57,37 @@ export function dataDatabricksInstancePoolPoolInfoAwsAttributesToTerraform(struc
}
}
+
+export function dataDatabricksInstancePoolPoolInfoAwsAttributesToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoAwsAttributesOutputReference | DataDatabricksInstancePoolPoolInfoAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -171,6 +202,31 @@ export function dataDatabricksInstancePoolPoolInfoAzureAttributesToTerraform(str
}
}
+
+export function dataDatabricksInstancePoolPoolInfoAzureAttributesToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoAzureAttributesOutputReference | DataDatabricksInstancePoolPoolInfoAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -263,6 +319,31 @@ export function dataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeToTerraform(st
}
}
+
+export function dataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeOutputReference | DataDatabricksInstancePoolPoolInfoDiskSpecDiskType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ azure_disk_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.azureDiskVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -362,6 +443,37 @@ export function dataDatabricksInstancePoolPoolInfoDiskSpecToTerraform(struct?: D
}
}
+
+export function dataDatabricksInstancePoolPoolInfoDiskSpecToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoDiskSpecOutputReference | DataDatabricksInstancePoolPoolInfoDiskSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ disk_count: {
+ value: cdktf.numberToHclTerraform(struct!.diskCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.diskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ disk_type: {
+ value: dataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeToHclTerraform(struct!.diskType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoDiskSpecDiskTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoDiskSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -476,6 +588,31 @@ export function dataDatabricksInstancePoolPoolInfoGcpAttributesToTerraform(struc
}
}
+
+export function dataDatabricksInstancePoolPoolInfoGcpAttributesToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoGcpAttributesOutputReference | DataDatabricksInstancePoolPoolInfoGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ gcp_availability: {
+ value: cdktf.stringToHclTerraform(struct!.gcpAvailability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -568,6 +705,31 @@ export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFle
}
}
+
+export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionOutputReference | DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ allocation_strategy: {
+ value: cdktf.stringToHclTerraform(struct!.allocationStrategy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pools_to_use_count: {
+ value: cdktf.numberToHclTerraform(struct!.instancePoolsToUseCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -657,6 +819,31 @@ export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFle
}
}
+
+export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionOutputReference | DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOption): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ allocation_strategy: {
+ value: cdktf.stringToHclTerraform(struct!.allocationStrategy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pools_to_use_count: {
+ value: cdktf.numberToHclTerraform(struct!.instancePoolsToUseCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -746,6 +933,31 @@ export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLau
}
}
+
+export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverride | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability_zone: {
+ value: cdktf.stringToHclTerraform(struct!.availabilityZone),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_type: {
+ value: cdktf.stringToHclTerraform(struct!.instanceType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -875,6 +1087,37 @@ export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesToT
}
}
+
+export function dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributes | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ fleet_on_demand_option: {
+ value: dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionToHclTerraform(struct!.fleetOnDemandOption),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOptionList",
+ },
+ fleet_spot_option: {
+ value: dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionToHclTerraform(struct!.fleetSpotOption),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesFleetSpotOptionList",
+ },
+ launch_template_override: {
+ value: cdktf.listMapperHcl(dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideToHclTerraform, true)(struct!.launchTemplateOverride),
+ isBlock: true,
+ type: "set",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesLaunchTemplateOverrideList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1018,6 +1261,31 @@ export function dataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthT
}
}
+
+export function dataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthOutputReference | DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1106,6 +1374,31 @@ export function dataDatabricksInstancePoolPoolInfoPreloadedDockerImageToTerrafor
}
}
+
+export function dataDatabricksInstancePoolPoolInfoPreloadedDockerImageToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoPreloadedDockerImage | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: dataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoPreloadedDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoPreloadedDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1237,6 +1530,43 @@ export function dataDatabricksInstancePoolPoolInfoStatsToTerraform(struct?: Data
}
}
+
+export function dataDatabricksInstancePoolPoolInfoStatsToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoStatsOutputReference | DataDatabricksInstancePoolPoolInfoStats): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ idle_count: {
+ value: cdktf.numberToHclTerraform(struct!.idleCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ pending_idle_count: {
+ value: cdktf.numberToHclTerraform(struct!.pendingIdleCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ pending_used_count: {
+ value: cdktf.numberToHclTerraform(struct!.pendingUsedCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ used_count: {
+ value: cdktf.numberToHclTerraform(struct!.usedCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoStatsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1467,6 +1797,127 @@ export function dataDatabricksInstancePoolPoolInfoToTerraform(struct?: DataDatab
}
}
+
+export function dataDatabricksInstancePoolPoolInfoToHclTerraform(struct?: DataDatabricksInstancePoolPoolInfoOutputReference | DataDatabricksInstancePoolPoolInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ default_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.defaultTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idle_instance_autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.idleInstanceAutoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_name: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_capacity: {
+ value: cdktf.numberToHclTerraform(struct!.maxCapacity),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_idle_instances: {
+ value: cdktf.numberToHclTerraform(struct!.minIdleInstances),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ preloaded_spark_versions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.preloadedSparkVersions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(struct!.state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_attributes: {
+ value: dataDatabricksInstancePoolPoolInfoAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoAwsAttributesList",
+ },
+ azure_attributes: {
+ value: dataDatabricksInstancePoolPoolInfoAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoAzureAttributesList",
+ },
+ disk_spec: {
+ value: dataDatabricksInstancePoolPoolInfoDiskSpecToHclTerraform(struct!.diskSpec),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoDiskSpecList",
+ },
+ gcp_attributes: {
+ value: dataDatabricksInstancePoolPoolInfoGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoGcpAttributesList",
+ },
+ instance_pool_fleet_attributes: {
+ value: cdktf.listMapperHcl(dataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesToHclTerraform, true)(struct!.instancePoolFleetAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoInstancePoolFleetAttributesList",
+ },
+ preloaded_docker_image: {
+ value: cdktf.listMapperHcl(dataDatabricksInstancePoolPoolInfoPreloadedDockerImageToHclTerraform, true)(struct!.preloadedDockerImage),
+ isBlock: true,
+ type: "set",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoPreloadedDockerImageList",
+ },
+ stats: {
+ value: dataDatabricksInstancePoolPoolInfoStatsToHclTerraform(struct!.stats),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoStatsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstancePoolPoolInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2000,4 +2451,30 @@ export class DataDatabricksInstancePool extends cdktf.TerraformDataSource {
pool_info: dataDatabricksInstancePoolPoolInfoToTerraform(this._poolInfo.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pool_info: {
+ value: dataDatabricksInstancePoolPoolInfoToHclTerraform(this._poolInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstancePoolPoolInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-instance-profiles/README.md b/src/data-databricks-instance-profiles/README.md
index 9c3ea0354..790bd9a5b 100644
--- a/src/data-databricks-instance-profiles/README.md
+++ b/src/data-databricks-instance-profiles/README.md
@@ -1,3 +1,3 @@
# `data_databricks_instance_profiles`
-Refer to the Terraform Registory for docs: [`data_databricks_instance_profiles`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/instance_profiles).
+Refer to the Terraform Registry for docs: [`data_databricks_instance_profiles`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/instance_profiles).
diff --git a/src/data-databricks-instance-profiles/index.ts b/src/data-databricks-instance-profiles/index.ts
index 1ba0f6ffc..9f6f06125 100644
--- a/src/data-databricks-instance-profiles/index.ts
+++ b/src/data-databricks-instance-profiles/index.ts
@@ -58,6 +58,43 @@ export function dataDatabricksInstanceProfilesInstanceProfilesToTerraform(struct
}
}
+
+export function dataDatabricksInstanceProfilesInstanceProfilesToHclTerraform(struct?: DataDatabricksInstanceProfilesInstanceProfiles | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ arn: {
+ value: cdktf.stringToHclTerraform(struct!.arn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_meta: {
+ value: cdktf.booleanToHclTerraform(struct!.isMeta),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role_arn: {
+ value: cdktf.stringToHclTerraform(struct!.roleArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksInstanceProfilesInstanceProfilesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -306,4 +343,24 @@ export class DataDatabricksInstanceProfiles extends cdktf.TerraformDataSource {
instance_profiles: cdktf.listMapper(dataDatabricksInstanceProfilesInstanceProfilesToTerraform, true)(this._instanceProfiles.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profiles: {
+ value: cdktf.listMapperHcl(dataDatabricksInstanceProfilesInstanceProfilesToHclTerraform, true)(this._instanceProfiles.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksInstanceProfilesInstanceProfilesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-job/README.md b/src/data-databricks-job/README.md
index ab8833fc8..e642fe040 100644
--- a/src/data-databricks-job/README.md
+++ b/src/data-databricks-job/README.md
@@ -1,3 +1,3 @@
# `data_databricks_job`
-Refer to the Terraform Registory for docs: [`data_databricks_job`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/job).
+Refer to the Terraform Registry for docs: [`data_databricks_job`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/job).
diff --git a/src/data-databricks-job/index-structs/structs0.ts b/src/data-databricks-job/index-structs/structs0.ts
index 394e1e4e9..8ce8eeb5c 100644
--- a/src/data-databricks-job/index-structs/structs0.ts
+++ b/src/data-databricks-job/index-structs/structs0.ts
@@ -21,6 +21,25 @@ export function dataDatabricksJobJobSettingsSettingsComputeSpecToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsComputeSpecToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsComputeSpecOutputReference | DataDatabricksJobJobSettingsSettingsComputeSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ kind: {
+ value: cdktf.stringToHclTerraform(struct!.kind),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsComputeSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -93,6 +112,31 @@ export function dataDatabricksJobJobSettingsSettingsComputeToTerraform(struct?:
}
}
+
+export function dataDatabricksJobJobSettingsSettingsComputeToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsCompute | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ compute_key: {
+ value: cdktf.stringToHclTerraform(struct!.computeKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spec: {
+ value: dataDatabricksJobJobSettingsSettingsComputeSpecToHclTerraform(struct!.spec),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsComputeSpecList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsComputeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -212,6 +256,25 @@ export function dataDatabricksJobJobSettingsSettingsContinuousToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsContinuousToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsContinuousOutputReference | DataDatabricksJobJobSettingsSettingsContinuous): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsContinuousOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -302,6 +365,55 @@ export function dataDatabricksJobJobSettingsSettingsDbtTaskToTerraform(struct?:
}
}
+
+export function dataDatabricksJobJobSettingsSettingsDbtTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsDbtTaskOutputReference | DataDatabricksJobJobSettingsSettingsDbtTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ catalog: {
+ value: cdktf.stringToHclTerraform(struct!.catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commands: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.commands),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ profiles_directory: {
+ value: cdktf.stringToHclTerraform(struct!.profilesDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ project_directory: {
+ value: cdktf.stringToHclTerraform(struct!.projectDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(struct!.schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsDbtTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -479,6 +591,31 @@ export function dataDatabricksJobJobSettingsSettingsDeploymentToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsDeploymentToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsDeploymentOutputReference | DataDatabricksJobJobSettingsSettingsDeployment): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ kind: {
+ value: cdktf.stringToHclTerraform(struct!.kind),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metadata_file_path: {
+ value: cdktf.stringToHclTerraform(struct!.metadataFilePath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsDeploymentOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -583,6 +720,49 @@ export function dataDatabricksJobJobSettingsSettingsEmailNotificationsToTerrafor
}
}
+
+export function dataDatabricksJobJobSettingsSettingsEmailNotificationsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsEmailNotificationsOutputReference | DataDatabricksJobJobSettingsSettingsEmailNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onFailure),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onStart),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onSuccess),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsEmailNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -746,6 +926,37 @@ export function dataDatabricksJobJobSettingsSettingsGitSourceJobSourceToTerrafor
}
}
+
+export function dataDatabricksJobJobSettingsSettingsGitSourceJobSourceToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsGitSourceJobSourceOutputReference | DataDatabricksJobJobSettingsSettingsGitSourceJobSource): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dirty_state: {
+ value: cdktf.stringToHclTerraform(struct!.dirtyState),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ import_from_git_branch: {
+ value: cdktf.stringToHclTerraform(struct!.importFromGitBranch),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_config_path: {
+ value: cdktf.stringToHclTerraform(struct!.jobConfigPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsGitSourceJobSourceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -876,6 +1087,55 @@ export function dataDatabricksJobJobSettingsSettingsGitSourceToTerraform(struct?
}
}
+
+export function dataDatabricksJobJobSettingsSettingsGitSourceToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsGitSourceOutputReference | DataDatabricksJobJobSettingsSettingsGitSource): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ branch: {
+ value: cdktf.stringToHclTerraform(struct!.branch),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commit: {
+ value: cdktf.stringToHclTerraform(struct!.commit),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ provider: {
+ value: cdktf.stringToHclTerraform(struct!.provider),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tag: {
+ value: cdktf.stringToHclTerraform(struct!.tag),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_source: {
+ value: dataDatabricksJobJobSettingsSettingsGitSourceJobSourceToHclTerraform(struct!.jobSource),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsGitSourceJobSourceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsGitSourceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1058,6 +1318,37 @@ export function dataDatabricksJobJobSettingsSettingsHealthRulesToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsHealthRulesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsHealthRules | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ metric: {
+ value: cdktf.stringToHclTerraform(struct!.metric),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.numberToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsHealthRulesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1201,6 +1492,25 @@ export function dataDatabricksJobJobSettingsSettingsHealthToTerraform(struct?: D
}
}
+
+export function dataDatabricksJobJobSettingsSettingsHealthToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsHealthOutputReference | DataDatabricksJobJobSettingsSettingsHealth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ rules: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsHealthRulesToHclTerraform, true)(struct!.rules),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsHealthRulesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsHealthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1268,6 +1578,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscal
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1390,6 +1725,67 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1619,6 +2015,37 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAtt
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1728,6 +2155,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterL
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1820,6 +2266,61 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterL
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3OutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2023,6 +2524,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterL
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2115,6 +2641,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterM
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2211,6 +2762,37 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterM
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2351,6 +2933,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerIm
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2439,6 +3046,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerIm
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2548,6 +3180,55 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2723,6 +3404,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2788,6 +3488,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2850,6 +3569,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2915,6 +3653,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3010,6 +3767,61 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3OutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3204,6 +4016,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3269,6 +4100,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3378,6 +4228,61 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScri
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3612,6 +4517,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkload
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3701,6 +4631,25 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkload
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3926,6 +4875,199 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterToTerraf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterNewClusterToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobClusterNewClusterOutputReference | DataDatabricksJobJobSettingsSettingsJobClusterNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4630,6 +5772,31 @@ export function dataDatabricksJobJobSettingsSettingsJobClusterToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsJobClusterToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsJobCluster | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_cluster_key: {
+ value: cdktf.stringToHclTerraform(struct!.jobClusterKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ new_cluster: {
+ value: dataDatabricksJobJobSettingsSettingsJobClusterNewClusterToHclTerraform(struct!.newCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterNewClusterList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsJobClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -4754,6 +5921,31 @@ export function dataDatabricksJobJobSettingsSettingsLibraryCranToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsLibraryCranToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsLibraryCranOutputReference | DataDatabricksJobJobSettingsSettingsLibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsLibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4848,6 +6040,37 @@ export function dataDatabricksJobJobSettingsSettingsLibraryMavenToTerraform(stru
}
}
+
+export function dataDatabricksJobJobSettingsSettingsLibraryMavenToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsLibraryMavenOutputReference | DataDatabricksJobJobSettingsSettingsLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4959,6 +6182,31 @@ export function dataDatabricksJobJobSettingsSettingsLibraryPypiToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsLibraryPypiToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsLibraryPypiOutputReference | DataDatabricksJobJobSettingsSettingsLibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsLibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5074,6 +6322,55 @@ export function dataDatabricksJobJobSettingsSettingsLibraryToTerraform(struct?:
}
}
+
+export function dataDatabricksJobJobSettingsSettingsLibraryToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: dataDatabricksJobJobSettingsSettingsLibraryCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsLibraryCranList",
+ },
+ maven: {
+ value: dataDatabricksJobJobSettingsSettingsLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsLibraryMavenList",
+ },
+ pypi: {
+ value: dataDatabricksJobJobSettingsSettingsLibraryPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsLibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -5286,6 +6583,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterAutoscaleToTerrafo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterAutoscaleToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterAutoscaleOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5408,6 +6730,67 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesToTer
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5637,6 +7020,37 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesToT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5746,6 +7160,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfs
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5838,6 +7271,61 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3To
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3OutputReference | DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6041,6 +7529,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfToTe
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6133,6 +7646,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNe
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6229,6 +7767,37 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoTo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -6369,6 +7938,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAu
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6457,6 +8051,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterDockerImageToTerra
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterDockerImageToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterDockerImageOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6566,6 +8185,55 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesToTer
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6741,6 +8409,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssTo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6806,6 +8493,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsToT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6868,6 +8574,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileToT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6933,6 +8658,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsToTe
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7028,6 +8772,61 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3ToTer
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3OutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7222,6 +9021,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumes
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7287,6 +9105,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspa
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7396,6 +9233,61 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsToTerra
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -7630,6 +9522,31 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClient
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7719,6 +9636,25 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeToTerr
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeOutputReference | DataDatabricksJobJobSettingsSettingsNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7944,6 +9880,199 @@ export function dataDatabricksJobJobSettingsSettingsNewClusterToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNewClusterToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNewClusterOutputReference | DataDatabricksJobJobSettingsSettingsNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8651,6 +10780,37 @@ export function dataDatabricksJobJobSettingsSettingsNotebookTaskToTerraform(stru
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNotebookTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNotebookTaskOutputReference | DataDatabricksJobJobSettingsSettingsNotebookTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ base_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.baseParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ notebook_path: {
+ value: cdktf.stringToHclTerraform(struct!.notebookPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNotebookTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8762,6 +10922,31 @@ export function dataDatabricksJobJobSettingsSettingsNotificationSettingsToTerraf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsNotificationSettingsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsNotificationSettingsOutputReference | DataDatabricksJobJobSettingsSettingsNotificationSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ no_alert_for_canceled_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForCanceledRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsNotificationSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8854,6 +11039,31 @@ export function dataDatabricksJobJobSettingsSettingsParameterToTerraform(struct?
}
}
+
+export function dataDatabricksJobJobSettingsSettingsParameterToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsParameter | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ default: {
+ value: cdktf.stringToHclTerraform(struct!.default),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsParameterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -8978,6 +11188,31 @@ export function dataDatabricksJobJobSettingsSettingsPipelineTaskToTerraform(stru
}
}
+
+export function dataDatabricksJobJobSettingsSettingsPipelineTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsPipelineTaskOutputReference | DataDatabricksJobJobSettingsSettingsPipelineTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ full_refresh: {
+ value: cdktf.booleanToHclTerraform(struct!.fullRefresh),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ pipeline_id: {
+ value: cdktf.stringToHclTerraform(struct!.pipelineId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsPipelineTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9077,6 +11312,43 @@ export function dataDatabricksJobJobSettingsSettingsPythonWheelTaskToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsPythonWheelTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsPythonWheelTaskOutputReference | DataDatabricksJobJobSettingsSettingsPythonWheelTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ entry_point: {
+ value: cdktf.stringToHclTerraform(struct!.entryPoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ named_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.namedParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ package_name: {
+ value: cdktf.stringToHclTerraform(struct!.packageName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsPythonWheelTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9208,6 +11480,25 @@ export function dataDatabricksJobJobSettingsSettingsQueueToTerraform(struct?: Da
}
}
+
+export function dataDatabricksJobJobSettingsSettingsQueueToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsQueueOutputReference | DataDatabricksJobJobSettingsSettingsQueue): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.enabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsQueueOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9275,6 +11566,31 @@ export function dataDatabricksJobJobSettingsSettingsRunAsToTerraform(struct?: Da
}
}
+
+export function dataDatabricksJobJobSettingsSettingsRunAsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsRunAsOutputReference | DataDatabricksJobJobSettingsSettingsRunAs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ service_principal_name: {
+ value: cdktf.stringToHclTerraform(struct!.servicePrincipalName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsRunAsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9367,6 +11683,31 @@ export function dataDatabricksJobJobSettingsSettingsRunJobTaskToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsRunJobTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsRunJobTaskOutputReference | DataDatabricksJobJobSettingsSettingsRunJobTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_id: {
+ value: cdktf.numberToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ job_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.jobParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsRunJobTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9461,6 +11802,37 @@ export function dataDatabricksJobJobSettingsSettingsScheduleToTerraform(struct?:
}
}
+
+export function dataDatabricksJobJobSettingsSettingsScheduleToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsScheduleOutputReference | DataDatabricksJobJobSettingsSettingsSchedule): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ quartz_cron_expression: {
+ value: cdktf.stringToHclTerraform(struct!.quartzCronExpression),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timezone_id: {
+ value: cdktf.stringToHclTerraform(struct!.timezoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsScheduleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9574,6 +11946,37 @@ export function dataDatabricksJobJobSettingsSettingsSparkJarTaskToTerraform(stru
}
}
+
+export function dataDatabricksJobJobSettingsSettingsSparkJarTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsSparkJarTaskOutputReference | DataDatabricksJobJobSettingsSettingsSparkJarTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jar_uri: {
+ value: cdktf.stringToHclTerraform(struct!.jarUri),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ main_class_name: {
+ value: cdktf.stringToHclTerraform(struct!.mainClassName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsSparkJarTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9693,6 +12096,37 @@ export function dataDatabricksJobJobSettingsSettingsSparkPythonTaskToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsSparkPythonTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsSparkPythonTaskOutputReference | DataDatabricksJobJobSettingsSettingsSparkPythonTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ python_file: {
+ value: cdktf.stringToHclTerraform(struct!.pythonFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsSparkPythonTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9799,6 +12233,25 @@ export function dataDatabricksJobJobSettingsSettingsSparkSubmitTaskToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsSparkSubmitTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsSparkSubmitTaskOutputReference | DataDatabricksJobJobSettingsSettingsSparkSubmitTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsSparkSubmitTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9874,6 +12327,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskConditionTaskToTerraform
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskConditionTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskConditionTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskConditionTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ left: {
+ value: cdktf.stringToHclTerraform(struct!.left),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ right: {
+ value: cdktf.stringToHclTerraform(struct!.right),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskConditionTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10008,6 +12492,55 @@ export function dataDatabricksJobJobSettingsSettingsTaskDbtTaskToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskDbtTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskDbtTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskDbtTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ catalog: {
+ value: cdktf.stringToHclTerraform(struct!.catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commands: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.commands),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ profiles_directory: {
+ value: cdktf.stringToHclTerraform(struct!.profilesDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ project_directory: {
+ value: cdktf.stringToHclTerraform(struct!.projectDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(struct!.schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskDbtTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10185,6 +12718,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskDependsOnToTerraform(str
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskDependsOnToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskDependsOn | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ outcome: {
+ value: cdktf.stringToHclTerraform(struct!.outcome),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ task_key: {
+ value: cdktf.stringToHclTerraform(struct!.taskKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskDependsOnOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -10316,6 +12874,43 @@ export function dataDatabricksJobJobSettingsSettingsTaskEmailNotificationsToTerr
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskEmailNotificationsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskEmailNotificationsOutputReference | DataDatabricksJobJobSettingsSettingsTaskEmailNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onFailure),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onStart),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onSuccess),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskEmailNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10457,6 +13052,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskHealthRulesToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskHealthRulesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskHealthRules | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ metric: {
+ value: cdktf.stringToHclTerraform(struct!.metric),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.numberToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskHealthRulesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -10600,6 +13226,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskHealthToTerraform(struct
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskHealthToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskHealthOutputReference | DataDatabricksJobJobSettingsSettingsTaskHealth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ rules: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskHealthRulesToHclTerraform, true)(struct!.rules),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskHealthRulesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskHealthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10667,6 +13312,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskLibraryCranToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskLibraryCranToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskLibraryCranOutputReference | DataDatabricksJobJobSettingsSettingsTaskLibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskLibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10761,6 +13431,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskLibraryMavenToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskLibraryMavenToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskLibraryMavenOutputReference | DataDatabricksJobJobSettingsSettingsTaskLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10872,6 +13573,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskLibraryPypiToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskLibraryPypiToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskLibraryPypiOutputReference | DataDatabricksJobJobSettingsSettingsTaskLibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskLibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10987,6 +13713,55 @@ export function dataDatabricksJobJobSettingsSettingsTaskLibraryToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskLibraryToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: dataDatabricksJobJobSettingsSettingsTaskLibraryCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskLibraryCranList",
+ },
+ maven: {
+ value: dataDatabricksJobJobSettingsSettingsTaskLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskLibraryMavenList",
+ },
+ pypi: {
+ value: dataDatabricksJobJobSettingsSettingsTaskLibraryPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskLibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -11199,6 +13974,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleToTer
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11321,6 +14121,67 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11550,6 +14411,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttribute
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11659,6 +14551,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11751,6 +14662,61 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3OutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11954,6 +14920,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12046,6 +15037,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountIn
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12142,6 +15158,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountIn
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -12282,6 +15329,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBas
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12370,6 +15442,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageToT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12479,6 +15576,55 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12654,6 +15800,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12719,6 +15884,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12781,6 +15965,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFil
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12846,6 +16049,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcs
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12941,6 +16163,61 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3T
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3ToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3OutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13135,6 +16412,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVol
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13200,6 +16496,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWor
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13309,6 +16624,61 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsToT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -13543,6 +16913,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeCl
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13632,6 +17027,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeTo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13857,6 +17271,199 @@ export function dataDatabricksJobJobSettingsSettingsTaskNewClusterToTerraform(st
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNewClusterToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNewClusterOutputReference | DataDatabricksJobJobSettingsSettingsTaskNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14564,6 +18171,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskNotebookTaskToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNotebookTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNotebookTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskNotebookTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ base_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.baseParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ notebook_path: {
+ value: cdktf.stringToHclTerraform(struct!.notebookPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNotebookTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14680,6 +18318,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskNotificationSettingsToTe
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskNotificationSettingsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskNotificationSettingsOutputReference | DataDatabricksJobJobSettingsSettingsTaskNotificationSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ alert_on_last_attempt: {
+ value: cdktf.booleanToHclTerraform(struct!.alertOnLastAttempt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_canceled_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForCanceledRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskNotificationSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14794,6 +18463,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskPipelineTaskToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskPipelineTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskPipelineTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskPipelineTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ full_refresh: {
+ value: cdktf.booleanToHclTerraform(struct!.fullRefresh),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ pipeline_id: {
+ value: cdktf.stringToHclTerraform(struct!.pipelineId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskPipelineTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14893,6 +18587,43 @@ export function dataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskToTerrafo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskPythonWheelTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ entry_point: {
+ value: cdktf.stringToHclTerraform(struct!.entryPoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ named_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.namedParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ package_name: {
+ value: cdktf.stringToHclTerraform(struct!.packageName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15029,6 +18760,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskRunJobTaskToTerraform(st
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskRunJobTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskRunJobTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskRunJobTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_id: {
+ value: cdktf.numberToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ job_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.jobParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskRunJobTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15123,6 +18879,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskSparkJarTaskToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSparkJarTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSparkJarTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskSparkJarTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jar_uri: {
+ value: cdktf.stringToHclTerraform(struct!.jarUri),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ main_class_name: {
+ value: cdktf.stringToHclTerraform(struct!.mainClassName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSparkJarTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15242,6 +19029,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskToTerrafo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskSparkPythonTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ python_file: {
+ value: cdktf.stringToHclTerraform(struct!.pythonFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15348,6 +19166,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskToTerrafo
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15418,6 +19255,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscription
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptions | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination_id: {
+ value: cdktf.stringToHclTerraform(struct!.destinationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -15549,6 +19411,37 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertOutputReference | DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlert): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ alert_id: {
+ value: cdktf.stringToHclTerraform(struct!.alertId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pause_subscriptions: {
+ value: cdktf.booleanToHclTerraform(struct!.pauseSubscriptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ subscriptions: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsToHclTerraform, true)(struct!.subscriptions),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertSubscriptionsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15657,6 +19550,31 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscrip
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptions | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination_id: {
+ value: cdktf.stringToHclTerraform(struct!.destinationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -15793,6 +19711,43 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardToTerraf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardOutputReference | DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboard): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ custom_subject: {
+ value: cdktf.stringToHclTerraform(struct!.customSubject),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ dashboard_id: {
+ value: cdktf.stringToHclTerraform(struct!.dashboardId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pause_subscriptions: {
+ value: cdktf.booleanToHclTerraform(struct!.pauseSubscriptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ subscriptions: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsToHclTerraform, true)(struct!.subscriptions),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptionsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15921,6 +19876,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskFileToTerraform(s
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskFileToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskFileOutputReference | DataDatabricksJobJobSettingsSettingsTaskSqlTaskFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15983,6 +19957,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryToTerraform(
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryOutputReference | DataDatabricksJobJobSettingsSettingsTaskSqlTaskQuery): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ query_id: {
+ value: cdktf.stringToHclTerraform(struct!.queryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16078,6 +20071,55 @@ export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskToTerraform(struc
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskSqlTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskSqlTaskOutputReference | DataDatabricksJobJobSettingsSettingsTaskSqlTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.parameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ alert: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertToHclTerraform(struct!.alert),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskAlertList",
+ },
+ dashboard: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardToHclTerraform(struct!.dashboard),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskDashboardList",
+ },
+ file: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSqlTaskFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskFileList",
+ },
+ query: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryToHclTerraform(struct!.query),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskQueryList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskSqlTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16256,6 +20298,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDu
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16356,6 +20417,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFa
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailure | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16456,6 +20536,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSt
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStart | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16556,6 +20655,25 @@ export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSu
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16676,6 +20794,43 @@ export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsToTe
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOutputReference | DataDatabricksJobJobSettingsSettingsTaskWebhookNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform, true)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceededList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureToHclTerraform, true)(struct!.onFailure),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnFailureList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartToHclTerraform, true)(struct!.onStart),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnStartList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessToHclTerraform, true)(struct!.onSuccess),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOnSuccessList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16971,6 +21126,181 @@ export function dataDatabricksJobJobSettingsSettingsTaskToTerraform(struct?: Dat
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTaskToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTask | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ compute_key: {
+ value: cdktf.stringToHclTerraform(struct!.computeKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(struct!.description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ existing_cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.existingClusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_cluster_key: {
+ value: cdktf.stringToHclTerraform(struct!.jobClusterKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_retries: {
+ value: cdktf.numberToHclTerraform(struct!.maxRetries),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_retry_interval_millis: {
+ value: cdktf.numberToHclTerraform(struct!.minRetryIntervalMillis),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ retry_on_timeout: {
+ value: cdktf.booleanToHclTerraform(struct!.retryOnTimeout),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ run_if: {
+ value: cdktf.stringToHclTerraform(struct!.runIf),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ task_key: {
+ value: cdktf.stringToHclTerraform(struct!.taskKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timeout_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.timeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ condition_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskConditionTaskToHclTerraform(struct!.conditionTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskConditionTaskList",
+ },
+ dbt_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskDbtTaskToHclTerraform(struct!.dbtTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskDbtTaskList",
+ },
+ depends_on: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskDependsOnToHclTerraform, true)(struct!.dependsOn),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskDependsOnList",
+ },
+ email_notifications: {
+ value: dataDatabricksJobJobSettingsSettingsTaskEmailNotificationsToHclTerraform(struct!.emailNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskEmailNotificationsList",
+ },
+ health: {
+ value: dataDatabricksJobJobSettingsSettingsTaskHealthToHclTerraform(struct!.health),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskHealthList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskLibraryToHclTerraform, true)(struct!.library),
+ isBlock: true,
+ type: "set",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskLibraryList",
+ },
+ new_cluster: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNewClusterToHclTerraform(struct!.newCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNewClusterList",
+ },
+ notebook_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNotebookTaskToHclTerraform(struct!.notebookTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNotebookTaskList",
+ },
+ notification_settings: {
+ value: dataDatabricksJobJobSettingsSettingsTaskNotificationSettingsToHclTerraform(struct!.notificationSettings),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskNotificationSettingsList",
+ },
+ pipeline_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskPipelineTaskToHclTerraform(struct!.pipelineTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskPipelineTaskList",
+ },
+ python_wheel_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskToHclTerraform(struct!.pythonWheelTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskPythonWheelTaskList",
+ },
+ run_job_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskRunJobTaskToHclTerraform(struct!.runJobTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskRunJobTaskList",
+ },
+ spark_jar_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSparkJarTaskToHclTerraform(struct!.sparkJarTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSparkJarTaskList",
+ },
+ spark_python_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskToHclTerraform(struct!.sparkPythonTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSparkPythonTaskList",
+ },
+ spark_submit_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskToHclTerraform(struct!.sparkSubmitTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSparkSubmitTaskList",
+ },
+ sql_task: {
+ value: dataDatabricksJobJobSettingsSettingsTaskSqlTaskToHclTerraform(struct!.sqlTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskSqlTaskList",
+ },
+ webhook_notifications: {
+ value: dataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsToHclTerraform(struct!.webhookNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskWebhookNotificationsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -17650,6 +21980,37 @@ export function dataDatabricksJobJobSettingsSettingsTriggerFileArrivalToTerrafor
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTriggerFileArrivalToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTriggerFileArrivalOutputReference | DataDatabricksJobJobSettingsSettingsTriggerFileArrival): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ min_time_between_triggers_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.minTimeBetweenTriggersSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ wait_after_last_change_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.waitAfterLastChangeSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTriggerFileArrivalOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -17763,6 +22124,31 @@ export function dataDatabricksJobJobSettingsSettingsTriggerToTerraform(struct?:
}
}
+
+export function dataDatabricksJobJobSettingsSettingsTriggerToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsTriggerOutputReference | DataDatabricksJobJobSettingsSettingsTrigger): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ file_arrival: {
+ value: dataDatabricksJobJobSettingsSettingsTriggerFileArrivalToHclTerraform(struct!.fileArrival),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTriggerFileArrivalList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsTriggerOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
diff --git a/src/data-databricks-job/index-structs/structs400.ts b/src/data-databricks-job/index-structs/structs400.ts
index 41a8c239a..db24e77ac 100644
--- a/src/data-databricks-job/index-structs/structs400.ts
+++ b/src/data-databricks-job/index-structs/structs400.ts
@@ -6,75 +6,99 @@
import * as cdktf from 'cdktf';
import { DataDatabricksJobJobSettingsSettingsCompute,
dataDatabricksJobJobSettingsSettingsComputeToTerraform,
+dataDatabricksJobJobSettingsSettingsComputeToHclTerraform,
DataDatabricksJobJobSettingsSettingsComputeList,
DataDatabricksJobJobSettingsSettingsContinuous,
dataDatabricksJobJobSettingsSettingsContinuousToTerraform,
+dataDatabricksJobJobSettingsSettingsContinuousToHclTerraform,
DataDatabricksJobJobSettingsSettingsContinuousOutputReference,
DataDatabricksJobJobSettingsSettingsDbtTask,
dataDatabricksJobJobSettingsSettingsDbtTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsDbtTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsDbtTaskOutputReference,
DataDatabricksJobJobSettingsSettingsDeployment,
dataDatabricksJobJobSettingsSettingsDeploymentToTerraform,
+dataDatabricksJobJobSettingsSettingsDeploymentToHclTerraform,
DataDatabricksJobJobSettingsSettingsDeploymentOutputReference,
DataDatabricksJobJobSettingsSettingsEmailNotifications,
dataDatabricksJobJobSettingsSettingsEmailNotificationsToTerraform,
+dataDatabricksJobJobSettingsSettingsEmailNotificationsToHclTerraform,
DataDatabricksJobJobSettingsSettingsEmailNotificationsOutputReference,
DataDatabricksJobJobSettingsSettingsGitSource,
dataDatabricksJobJobSettingsSettingsGitSourceToTerraform,
+dataDatabricksJobJobSettingsSettingsGitSourceToHclTerraform,
DataDatabricksJobJobSettingsSettingsGitSourceOutputReference,
DataDatabricksJobJobSettingsSettingsHealth,
dataDatabricksJobJobSettingsSettingsHealthToTerraform,
+dataDatabricksJobJobSettingsSettingsHealthToHclTerraform,
DataDatabricksJobJobSettingsSettingsHealthOutputReference,
DataDatabricksJobJobSettingsSettingsJobCluster,
dataDatabricksJobJobSettingsSettingsJobClusterToTerraform,
+dataDatabricksJobJobSettingsSettingsJobClusterToHclTerraform,
DataDatabricksJobJobSettingsSettingsJobClusterList,
DataDatabricksJobJobSettingsSettingsLibrary,
dataDatabricksJobJobSettingsSettingsLibraryToTerraform,
+dataDatabricksJobJobSettingsSettingsLibraryToHclTerraform,
DataDatabricksJobJobSettingsSettingsLibraryList,
DataDatabricksJobJobSettingsSettingsNewCluster,
dataDatabricksJobJobSettingsSettingsNewClusterToTerraform,
+dataDatabricksJobJobSettingsSettingsNewClusterToHclTerraform,
DataDatabricksJobJobSettingsSettingsNewClusterOutputReference,
DataDatabricksJobJobSettingsSettingsNotebookTask,
dataDatabricksJobJobSettingsSettingsNotebookTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsNotebookTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsNotebookTaskOutputReference,
DataDatabricksJobJobSettingsSettingsNotificationSettings,
dataDatabricksJobJobSettingsSettingsNotificationSettingsToTerraform,
+dataDatabricksJobJobSettingsSettingsNotificationSettingsToHclTerraform,
DataDatabricksJobJobSettingsSettingsNotificationSettingsOutputReference,
DataDatabricksJobJobSettingsSettingsParameter,
dataDatabricksJobJobSettingsSettingsParameterToTerraform,
+dataDatabricksJobJobSettingsSettingsParameterToHclTerraform,
DataDatabricksJobJobSettingsSettingsParameterList,
DataDatabricksJobJobSettingsSettingsPipelineTask,
dataDatabricksJobJobSettingsSettingsPipelineTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsPipelineTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsPipelineTaskOutputReference,
DataDatabricksJobJobSettingsSettingsPythonWheelTask,
dataDatabricksJobJobSettingsSettingsPythonWheelTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsPythonWheelTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsPythonWheelTaskOutputReference,
DataDatabricksJobJobSettingsSettingsQueue,
dataDatabricksJobJobSettingsSettingsQueueToTerraform,
+dataDatabricksJobJobSettingsSettingsQueueToHclTerraform,
DataDatabricksJobJobSettingsSettingsQueueOutputReference,
DataDatabricksJobJobSettingsSettingsRunAs,
dataDatabricksJobJobSettingsSettingsRunAsToTerraform,
+dataDatabricksJobJobSettingsSettingsRunAsToHclTerraform,
DataDatabricksJobJobSettingsSettingsRunAsOutputReference,
DataDatabricksJobJobSettingsSettingsRunJobTask,
dataDatabricksJobJobSettingsSettingsRunJobTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsRunJobTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsRunJobTaskOutputReference,
DataDatabricksJobJobSettingsSettingsSchedule,
dataDatabricksJobJobSettingsSettingsScheduleToTerraform,
+dataDatabricksJobJobSettingsSettingsScheduleToHclTerraform,
DataDatabricksJobJobSettingsSettingsScheduleOutputReference,
DataDatabricksJobJobSettingsSettingsSparkJarTask,
dataDatabricksJobJobSettingsSettingsSparkJarTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsSparkJarTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsSparkJarTaskOutputReference,
DataDatabricksJobJobSettingsSettingsSparkPythonTask,
dataDatabricksJobJobSettingsSettingsSparkPythonTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsSparkPythonTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsSparkPythonTaskOutputReference,
DataDatabricksJobJobSettingsSettingsSparkSubmitTask,
dataDatabricksJobJobSettingsSettingsSparkSubmitTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsSparkSubmitTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsSparkSubmitTaskOutputReference,
DataDatabricksJobJobSettingsSettingsTask,
dataDatabricksJobJobSettingsSettingsTaskToTerraform,
+dataDatabricksJobJobSettingsSettingsTaskToHclTerraform,
DataDatabricksJobJobSettingsSettingsTaskList,
DataDatabricksJobJobSettingsSettingsTrigger,
dataDatabricksJobJobSettingsSettingsTriggerToTerraform,
+dataDatabricksJobJobSettingsSettingsTriggerToHclTerraform,
DataDatabricksJobJobSettingsSettingsTriggerOutputReference } from './structs0'
export interface DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded {
/**
@@ -96,6 +120,25 @@ export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurati
}
}
+
+export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -196,6 +239,25 @@ export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailur
}
}
+
+export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailure | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -296,6 +358,25 @@ export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartT
}
}
+
+export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStart | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -396,6 +477,25 @@ export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSucces
}
}
+
+export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccess | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -516,6 +616,43 @@ export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsToTerraf
}
}
+
+export function dataDatabricksJobJobSettingsSettingsWebhookNotificationsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsWebhookNotificationsOutputReference | DataDatabricksJobJobSettingsSettingsWebhookNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform, true)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceededList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureToHclTerraform, true)(struct!.onFailure),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnFailureList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartToHclTerraform, true)(struct!.onStart),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnStartList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessToHclTerraform, true)(struct!.onSuccess),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsWebhookNotificationsOnSuccessList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsWebhookNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -872,6 +1009,235 @@ export function dataDatabricksJobJobSettingsSettingsToTerraform(struct?: DataDat
}
}
+
+export function dataDatabricksJobJobSettingsSettingsToHclTerraform(struct?: DataDatabricksJobJobSettingsSettingsOutputReference | DataDatabricksJobJobSettingsSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ description: {
+ value: cdktf.stringToHclTerraform(struct!.description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ edit_mode: {
+ value: cdktf.stringToHclTerraform(struct!.editMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ existing_cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.existingClusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ format: {
+ value: cdktf.stringToHclTerraform(struct!.format),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_concurrent_runs: {
+ value: cdktf.numberToHclTerraform(struct!.maxConcurrentRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ max_retries: {
+ value: cdktf.numberToHclTerraform(struct!.maxRetries),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_retry_interval_millis: {
+ value: cdktf.numberToHclTerraform(struct!.minRetryIntervalMillis),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ retry_on_timeout: {
+ value: cdktf.booleanToHclTerraform(struct!.retryOnTimeout),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.tags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ timeout_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.timeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ compute: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsComputeToHclTerraform, true)(struct!.compute),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsComputeList",
+ },
+ continuous: {
+ value: dataDatabricksJobJobSettingsSettingsContinuousToHclTerraform(struct!.continuous),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsContinuousList",
+ },
+ dbt_task: {
+ value: dataDatabricksJobJobSettingsSettingsDbtTaskToHclTerraform(struct!.dbtTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsDbtTaskList",
+ },
+ deployment: {
+ value: dataDatabricksJobJobSettingsSettingsDeploymentToHclTerraform(struct!.deployment),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsDeploymentList",
+ },
+ email_notifications: {
+ value: dataDatabricksJobJobSettingsSettingsEmailNotificationsToHclTerraform(struct!.emailNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsEmailNotificationsList",
+ },
+ git_source: {
+ value: dataDatabricksJobJobSettingsSettingsGitSourceToHclTerraform(struct!.gitSource),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsGitSourceList",
+ },
+ health: {
+ value: dataDatabricksJobJobSettingsSettingsHealthToHclTerraform(struct!.health),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsHealthList",
+ },
+ job_cluster: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsJobClusterToHclTerraform, true)(struct!.jobCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsJobClusterList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsLibraryToHclTerraform, true)(struct!.library),
+ isBlock: true,
+ type: "set",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsLibraryList",
+ },
+ new_cluster: {
+ value: dataDatabricksJobJobSettingsSettingsNewClusterToHclTerraform(struct!.newCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNewClusterList",
+ },
+ notebook_task: {
+ value: dataDatabricksJobJobSettingsSettingsNotebookTaskToHclTerraform(struct!.notebookTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNotebookTaskList",
+ },
+ notification_settings: {
+ value: dataDatabricksJobJobSettingsSettingsNotificationSettingsToHclTerraform(struct!.notificationSettings),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsNotificationSettingsList",
+ },
+ parameter: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsParameterToHclTerraform, true)(struct!.parameter),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsParameterList",
+ },
+ pipeline_task: {
+ value: dataDatabricksJobJobSettingsSettingsPipelineTaskToHclTerraform(struct!.pipelineTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsPipelineTaskList",
+ },
+ python_wheel_task: {
+ value: dataDatabricksJobJobSettingsSettingsPythonWheelTaskToHclTerraform(struct!.pythonWheelTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsPythonWheelTaskList",
+ },
+ queue: {
+ value: dataDatabricksJobJobSettingsSettingsQueueToHclTerraform(struct!.queue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsQueueList",
+ },
+ run_as: {
+ value: dataDatabricksJobJobSettingsSettingsRunAsToHclTerraform(struct!.runAs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsRunAsList",
+ },
+ run_job_task: {
+ value: dataDatabricksJobJobSettingsSettingsRunJobTaskToHclTerraform(struct!.runJobTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsRunJobTaskList",
+ },
+ schedule: {
+ value: dataDatabricksJobJobSettingsSettingsScheduleToHclTerraform(struct!.schedule),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsScheduleList",
+ },
+ spark_jar_task: {
+ value: dataDatabricksJobJobSettingsSettingsSparkJarTaskToHclTerraform(struct!.sparkJarTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsSparkJarTaskList",
+ },
+ spark_python_task: {
+ value: dataDatabricksJobJobSettingsSettingsSparkPythonTaskToHclTerraform(struct!.sparkPythonTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsSparkPythonTaskList",
+ },
+ spark_submit_task: {
+ value: dataDatabricksJobJobSettingsSettingsSparkSubmitTaskToHclTerraform(struct!.sparkSubmitTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsSparkSubmitTaskList",
+ },
+ task: {
+ value: cdktf.listMapperHcl(dataDatabricksJobJobSettingsSettingsTaskToHclTerraform, true)(struct!.task),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTaskList",
+ },
+ trigger: {
+ value: dataDatabricksJobJobSettingsSettingsTriggerToHclTerraform(struct!.trigger),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsTriggerList",
+ },
+ webhook_notifications: {
+ value: dataDatabricksJobJobSettingsSettingsWebhookNotificationsToHclTerraform(struct!.webhookNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsWebhookNotificationsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1729,6 +2095,49 @@ export function dataDatabricksJobJobSettingsToTerraform(struct?: DataDatabricksJ
}
}
+
+export function dataDatabricksJobJobSettingsToHclTerraform(struct?: DataDatabricksJobJobSettingsOutputReference | DataDatabricksJobJobSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ created_time: {
+ value: cdktf.numberToHclTerraform(struct!.createdTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ creator_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.creatorUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_id: {
+ value: cdktf.numberToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ run_as_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.runAsUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ settings: {
+ value: dataDatabricksJobJobSettingsSettingsToHclTerraform(struct!.settings),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsSettingsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksJobJobSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
diff --git a/src/data-databricks-job/index.ts b/src/data-databricks-job/index.ts
index 3f1fbccb7..73df33506 100644
--- a/src/data-databricks-job/index.ts
+++ b/src/data-databricks-job/index.ts
@@ -7,6 +7,7 @@
import { DataDatabricksJobJobSettings,
dataDatabricksJobJobSettingsToTerraform,
+dataDatabricksJobJobSettingsToHclTerraform,
DataDatabricksJobJobSettingsOutputReference} from './index-structs'
export * from './index-structs'
import { Construct } from 'constructs';
@@ -194,4 +195,42 @@ export class DataDatabricksJob extends cdktf.TerraformDataSource {
job_settings: dataDatabricksJobJobSettingsToTerraform(this._jobSettings.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_id: {
+ value: cdktf.stringToHclTerraform(this._jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_name: {
+ value: cdktf.stringToHclTerraform(this._jobName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_settings: {
+ value: dataDatabricksJobJobSettingsToHclTerraform(this._jobSettings.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksJobJobSettingsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-jobs/README.md b/src/data-databricks-jobs/README.md
index 331c282c9..9260f2456 100644
--- a/src/data-databricks-jobs/README.md
+++ b/src/data-databricks-jobs/README.md
@@ -1,3 +1,3 @@
# `data_databricks_jobs`
-Refer to the Terraform Registory for docs: [`data_databricks_jobs`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/jobs).
+Refer to the Terraform Registry for docs: [`data_databricks_jobs`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/jobs).
diff --git a/src/data-databricks-jobs/index.ts b/src/data-databricks-jobs/index.ts
index 2f7692105..bb90fedeb 100644
--- a/src/data-databricks-jobs/index.ts
+++ b/src/data-databricks-jobs/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksJobs extends cdktf.TerraformDataSource {
ids: cdktf.hashMapper(cdktf.stringToTerraform)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._ids),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-metastore/README.md b/src/data-databricks-metastore/README.md
index 673f9667e..58724ed42 100644
--- a/src/data-databricks-metastore/README.md
+++ b/src/data-databricks-metastore/README.md
@@ -1,3 +1,3 @@
# `data_databricks_metastore`
-Refer to the Terraform Registory for docs: [`data_databricks_metastore`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/metastore).
+Refer to the Terraform Registry for docs: [`data_databricks_metastore`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/metastore).
diff --git a/src/data-databricks-metastore/index.ts b/src/data-databricks-metastore/index.ts
index 5ede07970..c2e605a11 100644
--- a/src/data-databricks-metastore/index.ts
+++ b/src/data-databricks-metastore/index.ts
@@ -132,6 +132,127 @@ export function dataDatabricksMetastoreMetastoreInfoToTerraform(struct?: DataDat
}
}
+
+export function dataDatabricksMetastoreMetastoreInfoToHclTerraform(struct?: DataDatabricksMetastoreMetastoreInfoOutputReference | DataDatabricksMetastoreMetastoreInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ cloud: {
+ value: cdktf.stringToHclTerraform(struct!.cloud),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ created_at: {
+ value: cdktf.numberToHclTerraform(struct!.createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(struct!.createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ default_data_access_config_id: {
+ value: cdktf.stringToHclTerraform(struct!.defaultDataAccessConfigId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delta_sharing_organization_name: {
+ value: cdktf.stringToHclTerraform(struct!.deltaSharingOrganizationName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delta_sharing_recipient_token_lifetime_in_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.deltaSharingRecipientTokenLifetimeInSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ delta_sharing_scope: {
+ value: cdktf.stringToHclTerraform(struct!.deltaSharingScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ global_metastore_id: {
+ value: cdktf.stringToHclTerraform(struct!.globalMetastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(struct!.metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(struct!.owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ privilege_model_version: {
+ value: cdktf.stringToHclTerraform(struct!.privilegeModelVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root: {
+ value: cdktf.stringToHclTerraform(struct!.storageRoot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root_credential_id: {
+ value: cdktf.stringToHclTerraform(struct!.storageRootCredentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root_credential_name: {
+ value: cdktf.stringToHclTerraform(struct!.storageRootCredentialName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ updated_at: {
+ value: cdktf.numberToHclTerraform(struct!.updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ updated_by: {
+ value: cdktf.stringToHclTerraform(struct!.updatedBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksMetastoreMetastoreInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -671,4 +792,30 @@ export class DataDatabricksMetastore extends cdktf.TerraformDataSource {
metastore_info: dataDatabricksMetastoreMetastoreInfoToTerraform(this._metastoreInfo.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_info: {
+ value: dataDatabricksMetastoreMetastoreInfoToHclTerraform(this._metastoreInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksMetastoreMetastoreInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-metastores/README.md b/src/data-databricks-metastores/README.md
index 4ce342025..91e20faf2 100644
--- a/src/data-databricks-metastores/README.md
+++ b/src/data-databricks-metastores/README.md
@@ -1,3 +1,3 @@
# `data_databricks_metastores`
-Refer to the Terraform Registory for docs: [`data_databricks_metastores`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/metastores).
+Refer to the Terraform Registry for docs: [`data_databricks_metastores`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/metastores).
diff --git a/src/data-databricks-metastores/index.ts b/src/data-databricks-metastores/index.ts
index 60785242f..5c1e639e8 100644
--- a/src/data-databricks-metastores/index.ts
+++ b/src/data-databricks-metastores/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksMetastores extends cdktf.TerraformDataSource {
ids: cdktf.hashMapper(cdktf.stringToTerraform)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._ids),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-mlflow-model/README.md b/src/data-databricks-mlflow-model/README.md
index d7522a35a..da5d644bc 100644
--- a/src/data-databricks-mlflow-model/README.md
+++ b/src/data-databricks-mlflow-model/README.md
@@ -1,3 +1,3 @@
# `data_databricks_mlflow_model`
-Refer to the Terraform Registory for docs: [`data_databricks_mlflow_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mlflow_model).
+Refer to the Terraform Registry for docs: [`data_databricks_mlflow_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mlflow_model).
diff --git a/src/data-databricks-mlflow-model/index.ts b/src/data-databricks-mlflow-model/index.ts
index 6cc2fb924..6c640a790 100644
--- a/src/data-databricks-mlflow-model/index.ts
+++ b/src/data-databricks-mlflow-model/index.ts
@@ -63,6 +63,31 @@ export function dataDatabricksMlflowModelLatestVersionsTagsToTerraform(struct?:
}
}
+
+export function dataDatabricksMlflowModelLatestVersionsTagsToHclTerraform(struct?: DataDatabricksMlflowModelLatestVersionsTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksMlflowModelLatestVersionsTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -244,6 +269,97 @@ export function dataDatabricksMlflowModelLatestVersionsToTerraform(struct?: Data
}
}
+
+export function dataDatabricksMlflowModelLatestVersionsToHclTerraform(struct?: DataDatabricksMlflowModelLatestVersions | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ creation_timestamp: {
+ value: cdktf.numberToHclTerraform(struct!.creationTimestamp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ current_stage: {
+ value: cdktf.stringToHclTerraform(struct!.currentStage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(struct!.description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ last_updated_timestamp: {
+ value: cdktf.numberToHclTerraform(struct!.lastUpdatedTimestamp),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ run_id: {
+ value: cdktf.stringToHclTerraform(struct!.runId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ run_link: {
+ value: cdktf.stringToHclTerraform(struct!.runLink),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(struct!.status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status_message: {
+ value: cdktf.stringToHclTerraform(struct!.statusMessage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(struct!.userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ version: {
+ value: cdktf.stringToHclTerraform(struct!.version),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(dataDatabricksMlflowModelLatestVersionsTagsToHclTerraform, true)(struct!.tags),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksMlflowModelLatestVersionsTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksMlflowModelLatestVersionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -610,6 +726,31 @@ export function dataDatabricksMlflowModelTagsToTerraform(struct?: DataDatabricks
}
}
+
+export function dataDatabricksMlflowModelTagsToHclTerraform(struct?: DataDatabricksMlflowModelTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksMlflowModelTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -888,4 +1029,48 @@ export class DataDatabricksMlflowModel extends cdktf.TerraformDataSource {
tags: cdktf.listMapper(dataDatabricksMlflowModelTagsToTerraform, true)(this._tags.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ permission_level: {
+ value: cdktf.stringToHclTerraform(this._permissionLevel),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(this._userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ latest_versions: {
+ value: cdktf.listMapperHcl(dataDatabricksMlflowModelLatestVersionsToHclTerraform, true)(this._latestVersions.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksMlflowModelLatestVersionsList",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(dataDatabricksMlflowModelTagsToHclTerraform, true)(this._tags.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksMlflowModelTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-mws-credentials/README.md b/src/data-databricks-mws-credentials/README.md
index 26304a9de..4bc7d9873 100644
--- a/src/data-databricks-mws-credentials/README.md
+++ b/src/data-databricks-mws-credentials/README.md
@@ -1,3 +1,3 @@
# `data_databricks_mws_credentials`
-Refer to the Terraform Registory for docs: [`data_databricks_mws_credentials`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mws_credentials).
+Refer to the Terraform Registry for docs: [`data_databricks_mws_credentials`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mws_credentials).
diff --git a/src/data-databricks-mws-credentials/index.ts b/src/data-databricks-mws-credentials/index.ts
index 9ac247ff9..5f81e0bfd 100644
--- a/src/data-databricks-mws-credentials/index.ts
+++ b/src/data-databricks-mws-credentials/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksMwsCredentials extends cdktf.TerraformDataSource {
ids: cdktf.hashMapper(cdktf.stringToTerraform)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._ids),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-mws-workspaces/README.md b/src/data-databricks-mws-workspaces/README.md
index daa97ec6c..93da0e4d5 100644
--- a/src/data-databricks-mws-workspaces/README.md
+++ b/src/data-databricks-mws-workspaces/README.md
@@ -1,3 +1,3 @@
# `data_databricks_mws_workspaces`
-Refer to the Terraform Registory for docs: [`data_databricks_mws_workspaces`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mws_workspaces).
+Refer to the Terraform Registry for docs: [`data_databricks_mws_workspaces`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/mws_workspaces).
diff --git a/src/data-databricks-mws-workspaces/index.ts b/src/data-databricks-mws-workspaces/index.ts
index aa56a6ab2..70620d4f0 100644
--- a/src/data-databricks-mws-workspaces/index.ts
+++ b/src/data-databricks-mws-workspaces/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksMwsWorkspaces extends cdktf.TerraformDataSource {
ids: cdktf.hashMapper(cdktf.numberToTerraform)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.hashMapperHcl(cdktf.numberToHclTerraform)(this._ids),
+ isBlock: false,
+ type: "map",
+ storageClassType: "numberMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-node-type/README.md b/src/data-databricks-node-type/README.md
index 31f6968ba..e66bbe61c 100644
--- a/src/data-databricks-node-type/README.md
+++ b/src/data-databricks-node-type/README.md
@@ -1,3 +1,3 @@
# `data_databricks_node_type`
-Refer to the Terraform Registory for docs: [`data_databricks_node_type`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/node_type).
+Refer to the Terraform Registry for docs: [`data_databricks_node_type`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/node_type).
diff --git a/src/data-databricks-node-type/index.ts b/src/data-databricks-node-type/index.ts
index 20428739f..acfe50708 100644
--- a/src/data-databricks-node-type/index.ts
+++ b/src/data-databricks-node-type/index.ts
@@ -390,4 +390,96 @@ export class DataDatabricksNodeType extends cdktf.TerraformDataSource {
support_port_forwarding: cdktf.booleanToTerraform(this._supportPortForwarding),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ category: {
+ value: cdktf.stringToHclTerraform(this._category),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ fleet: {
+ value: cdktf.booleanToHclTerraform(this._fleet),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ gb_per_core: {
+ value: cdktf.numberToHclTerraform(this._gbPerCore),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ graviton: {
+ value: cdktf.booleanToHclTerraform(this._graviton),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_io_cache_enabled: {
+ value: cdktf.booleanToHclTerraform(this._isIoCacheEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ local_disk: {
+ value: cdktf.booleanToHclTerraform(this._localDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ local_disk_min_size: {
+ value: cdktf.numberToHclTerraform(this._localDiskMinSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_cores: {
+ value: cdktf.numberToHclTerraform(this._minCores),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_gpus: {
+ value: cdktf.numberToHclTerraform(this._minGpus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_memory_gb: {
+ value: cdktf.numberToHclTerraform(this._minMemoryGb),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ photon_driver_capable: {
+ value: cdktf.booleanToHclTerraform(this._photonDriverCapable),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ photon_worker_capable: {
+ value: cdktf.booleanToHclTerraform(this._photonWorkerCapable),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ support_port_forwarding: {
+ value: cdktf.booleanToHclTerraform(this._supportPortForwarding),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-notebook-paths/README.md b/src/data-databricks-notebook-paths/README.md
index bd8ebc8e1..190003971 100644
--- a/src/data-databricks-notebook-paths/README.md
+++ b/src/data-databricks-notebook-paths/README.md
@@ -1,3 +1,3 @@
# `data_databricks_notebook_paths`
-Refer to the Terraform Registory for docs: [`data_databricks_notebook_paths`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/notebook_paths).
+Refer to the Terraform Registry for docs: [`data_databricks_notebook_paths`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/notebook_paths).
diff --git a/src/data-databricks-notebook-paths/index.ts b/src/data-databricks-notebook-paths/index.ts
index c22dffb71..a75fc14f9 100644
--- a/src/data-databricks-notebook-paths/index.ts
+++ b/src/data-databricks-notebook-paths/index.ts
@@ -40,6 +40,17 @@ export function dataDatabricksNotebookPathsNotebookPathListStructToTerraform(str
}
}
+
+export function dataDatabricksNotebookPathsNotebookPathListStructToHclTerraform(struct?: DataDatabricksNotebookPathsNotebookPathListStruct): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ };
+ return attrs;
+}
+
export class DataDatabricksNotebookPathsNotebookPathListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -217,4 +228,30 @@ export class DataDatabricksNotebookPaths extends cdktf.TerraformDataSource {
recursive: cdktf.booleanToTerraform(this._recursive),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ recursive: {
+ value: cdktf.booleanToHclTerraform(this._recursive),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-notebook/README.md b/src/data-databricks-notebook/README.md
index 21774d957..c76064789 100644
--- a/src/data-databricks-notebook/README.md
+++ b/src/data-databricks-notebook/README.md
@@ -1,3 +1,3 @@
# `data_databricks_notebook`
-Refer to the Terraform Registory for docs: [`data_databricks_notebook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/notebook).
+Refer to the Terraform Registry for docs: [`data_databricks_notebook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/notebook).
diff --git a/src/data-databricks-notebook/index.ts b/src/data-databricks-notebook/index.ts
index 5c36dfd20..6a231f103 100644
--- a/src/data-databricks-notebook/index.ts
+++ b/src/data-databricks-notebook/index.ts
@@ -213,4 +213,48 @@ export class DataDatabricksNotebook extends cdktf.TerraformDataSource {
path: cdktf.stringToTerraform(this._path),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ format: {
+ value: cdktf.stringToHclTerraform(this._format),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ language: {
+ value: cdktf.stringToHclTerraform(this._language),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_id: {
+ value: cdktf.numberToHclTerraform(this._objectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ object_type: {
+ value: cdktf.stringToHclTerraform(this._objectType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-pipelines/README.md b/src/data-databricks-pipelines/README.md
index 5f18a72b6..eac85c3bf 100644
--- a/src/data-databricks-pipelines/README.md
+++ b/src/data-databricks-pipelines/README.md
@@ -1,3 +1,3 @@
# `data_databricks_pipelines`
-Refer to the Terraform Registory for docs: [`data_databricks_pipelines`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/pipelines).
+Refer to the Terraform Registry for docs: [`data_databricks_pipelines`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/pipelines).
diff --git a/src/data-databricks-pipelines/index.ts b/src/data-databricks-pipelines/index.ts
index bafb3581a..32d94ab6a 100644
--- a/src/data-databricks-pipelines/index.ts
+++ b/src/data-databricks-pipelines/index.ts
@@ -148,4 +148,30 @@ export class DataDatabricksPipelines extends cdktf.TerraformDataSource {
pipeline_name: cdktf.stringToTerraform(this._pipelineName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ pipeline_name: {
+ value: cdktf.stringToHclTerraform(this._pipelineName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-schemas/README.md b/src/data-databricks-schemas/README.md
index 39acf06ad..1539b64eb 100644
--- a/src/data-databricks-schemas/README.md
+++ b/src/data-databricks-schemas/README.md
@@ -1,3 +1,3 @@
# `data_databricks_schemas`
-Refer to the Terraform Registory for docs: [`data_databricks_schemas`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/schemas).
+Refer to the Terraform Registry for docs: [`data_databricks_schemas`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/schemas).
diff --git a/src/data-databricks-schemas/index.ts b/src/data-databricks-schemas/index.ts
index 4b4556b68..6d14ee918 100644
--- a/src/data-databricks-schemas/index.ts
+++ b/src/data-databricks-schemas/index.ts
@@ -145,4 +145,30 @@ export class DataDatabricksSchemas extends cdktf.TerraformDataSource {
ids: cdktf.listMapper(cdktf.stringToTerraform, false)(this._ids),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-service-principal/README.md b/src/data-databricks-service-principal/README.md
index b4b60c0fd..430b6cf52 100644
--- a/src/data-databricks-service-principal/README.md
+++ b/src/data-databricks-service-principal/README.md
@@ -1,3 +1,3 @@
# `data_databricks_service_principal`
-Refer to the Terraform Registory for docs: [`data_databricks_service_principal`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/service_principal).
+Refer to the Terraform Registry for docs: [`data_databricks_service_principal`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/service_principal).
diff --git a/src/data-databricks-service-principal/index.ts b/src/data-databricks-service-principal/index.ts
index d6b28e799..37b7e64fe 100644
--- a/src/data-databricks-service-principal/index.ts
+++ b/src/data-databricks-service-principal/index.ts
@@ -280,4 +280,66 @@ export class DataDatabricksServicePrincipal extends cdktf.TerraformDataSource {
sp_id: cdktf.stringToTerraform(this._spId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ acl_principal_id: {
+ value: cdktf.stringToHclTerraform(this._aclPrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ active: {
+ value: cdktf.booleanToHclTerraform(this._active),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ application_id: {
+ value: cdktf.stringToHclTerraform(this._applicationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ display_name: {
+ value: cdktf.stringToHclTerraform(this._displayName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ home: {
+ value: cdktf.stringToHclTerraform(this._home),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repos: {
+ value: cdktf.stringToHclTerraform(this._repos),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sp_id: {
+ value: cdktf.stringToHclTerraform(this._spId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-service-principals/README.md b/src/data-databricks-service-principals/README.md
index 7b5a0f965..8d87dc650 100644
--- a/src/data-databricks-service-principals/README.md
+++ b/src/data-databricks-service-principals/README.md
@@ -1,3 +1,3 @@
# `data_databricks_service_principals`
-Refer to the Terraform Registory for docs: [`data_databricks_service_principals`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/service_principals).
+Refer to the Terraform Registry for docs: [`data_databricks_service_principals`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/service_principals).
diff --git a/src/data-databricks-service-principals/index.ts b/src/data-databricks-service-principals/index.ts
index a32315002..d8959e2d3 100644
--- a/src/data-databricks-service-principals/index.ts
+++ b/src/data-databricks-service-principals/index.ts
@@ -148,4 +148,30 @@ export class DataDatabricksServicePrincipals extends cdktf.TerraformDataSource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ application_ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._applicationIds),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ display_name_contains: {
+ value: cdktf.stringToHclTerraform(this._displayNameContains),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-share/README.md b/src/data-databricks-share/README.md
index dfd69ca7e..9b37d6100 100644
--- a/src/data-databricks-share/README.md
+++ b/src/data-databricks-share/README.md
@@ -1,3 +1,3 @@
# `data_databricks_share`
-Refer to the Terraform Registory for docs: [`data_databricks_share`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/share).
+Refer to the Terraform Registry for docs: [`data_databricks_share`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/share).
diff --git a/src/data-databricks-share/index.ts b/src/data-databricks-share/index.ts
index 603663801..49d989402 100644
--- a/src/data-databricks-share/index.ts
+++ b/src/data-databricks-share/index.ts
@@ -70,6 +70,43 @@ export function dataDatabricksShareObjectPartitionValueToTerraform(struct?: Data
}
}
+
+export function dataDatabricksShareObjectPartitionValueToHclTerraform(struct?: DataDatabricksShareObjectPartitionValue | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ recipient_property_key: {
+ value: cdktf.stringToHclTerraform(struct!.recipientPropertyKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksShareObjectPartitionValueOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -229,6 +266,25 @@ export function dataDatabricksShareObjectPartitionToTerraform(struct?: DataDatab
}
}
+
+export function dataDatabricksShareObjectPartitionToHclTerraform(struct?: DataDatabricksShareObjectPartition | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.listMapperHcl(dataDatabricksShareObjectPartitionValueToHclTerraform, true)(struct!.value),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksShareObjectPartitionValueList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksShareObjectPartitionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -375,6 +431,85 @@ export function dataDatabricksShareObjectToTerraform(struct?: DataDatabricksShar
}
}
+
+export function dataDatabricksShareObjectToHclTerraform(struct?: DataDatabricksShareObject | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ added_at: {
+ value: cdktf.numberToHclTerraform(struct!.addedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ added_by: {
+ value: cdktf.stringToHclTerraform(struct!.addedBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cdf_enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.cdfEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(struct!.comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_object_type: {
+ value: cdktf.stringToHclTerraform(struct!.dataObjectType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ history_data_sharing_status: {
+ value: cdktf.stringToHclTerraform(struct!.historyDataSharingStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ shared_as: {
+ value: cdktf.stringToHclTerraform(struct!.sharedAs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start_version: {
+ value: cdktf.numberToHclTerraform(struct!.startVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(struct!.status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ partition: {
+ value: cdktf.listMapperHcl(dataDatabricksShareObjectPartitionToHclTerraform, true)(struct!.partition),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksShareObjectPartitionList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksShareObjectOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -825,4 +960,42 @@ export class DataDatabricksShare extends cdktf.TerraformDataSource {
object: cdktf.listMapper(dataDatabricksShareObjectToTerraform, true)(this._object.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ created_at: {
+ value: cdktf.numberToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(this._createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object: {
+ value: cdktf.listMapperHcl(dataDatabricksShareObjectToHclTerraform, true)(this._object.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "DataDatabricksShareObjectList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-shares/README.md b/src/data-databricks-shares/README.md
index c0af0e4fd..da98beede 100644
--- a/src/data-databricks-shares/README.md
+++ b/src/data-databricks-shares/README.md
@@ -1,3 +1,3 @@
# `data_databricks_shares`
-Refer to the Terraform Registory for docs: [`data_databricks_shares`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/shares).
+Refer to the Terraform Registry for docs: [`data_databricks_shares`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/shares).
diff --git a/src/data-databricks-shares/index.ts b/src/data-databricks-shares/index.ts
index 64ecd94e3..348422550 100644
--- a/src/data-databricks-shares/index.ts
+++ b/src/data-databricks-shares/index.ts
@@ -126,4 +126,24 @@ export class DataDatabricksShares extends cdktf.TerraformDataSource {
shares: cdktf.listMapper(cdktf.stringToTerraform, false)(this._shares),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ shares: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._shares),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-spark-version/README.md b/src/data-databricks-spark-version/README.md
index 1747d229f..27d2252f4 100644
--- a/src/data-databricks-spark-version/README.md
+++ b/src/data-databricks-spark-version/README.md
@@ -1,3 +1,3 @@
# `data_databricks_spark_version`
-Refer to the Terraform Registory for docs: [`data_databricks_spark_version`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/spark_version).
+Refer to the Terraform Registry for docs: [`data_databricks_spark_version`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/spark_version).
diff --git a/src/data-databricks-spark-version/index.ts b/src/data-databricks-spark-version/index.ts
index 14db21240..0d0f7098d 100644
--- a/src/data-databricks-spark-version/index.ts
+++ b/src/data-databricks-spark-version/index.ts
@@ -324,4 +324,78 @@ export class DataDatabricksSparkVersion extends cdktf.TerraformDataSource {
spark_version: cdktf.stringToTerraform(this._sparkVersion),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ beta: {
+ value: cdktf.booleanToHclTerraform(this._beta),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ genomics: {
+ value: cdktf.booleanToHclTerraform(this._genomics),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ gpu: {
+ value: cdktf.booleanToHclTerraform(this._gpu),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ graviton: {
+ value: cdktf.booleanToHclTerraform(this._graviton),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ latest: {
+ value: cdktf.booleanToHclTerraform(this._latest),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ long_term_support: {
+ value: cdktf.booleanToHclTerraform(this._longTermSupport),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ ml: {
+ value: cdktf.booleanToHclTerraform(this._ml),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ photon: {
+ value: cdktf.booleanToHclTerraform(this._photon),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ scala: {
+ value: cdktf.stringToHclTerraform(this._scala),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(this._sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-sql-warehouse/README.md b/src/data-databricks-sql-warehouse/README.md
index c59222669..a45306e57 100644
--- a/src/data-databricks-sql-warehouse/README.md
+++ b/src/data-databricks-sql-warehouse/README.md
@@ -1,3 +1,3 @@
# `data_databricks_sql_warehouse`
-Refer to the Terraform Registory for docs: [`data_databricks_sql_warehouse`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/sql_warehouse).
+Refer to the Terraform Registry for docs: [`data_databricks_sql_warehouse`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/sql_warehouse).
diff --git a/src/data-databricks-sql-warehouse/index.ts b/src/data-databricks-sql-warehouse/index.ts
index 72aadbf0b..ce210e5fe 100644
--- a/src/data-databricks-sql-warehouse/index.ts
+++ b/src/data-databricks-sql-warehouse/index.ts
@@ -107,6 +107,25 @@ export function dataDatabricksSqlWarehouseChannelToTerraform(struct?: DataDatabr
}
}
+
+export function dataDatabricksSqlWarehouseChannelToHclTerraform(struct?: DataDatabricksSqlWarehouseChannelOutputReference | DataDatabricksSqlWarehouseChannel): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksSqlWarehouseChannelOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -192,6 +211,49 @@ export function dataDatabricksSqlWarehouseOdbcParamsToTerraform(struct?: DataDat
}
}
+
+export function dataDatabricksSqlWarehouseOdbcParamsToHclTerraform(struct?: DataDatabricksSqlWarehouseOdbcParamsOutputReference | DataDatabricksSqlWarehouseOdbcParams): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ host: {
+ value: cdktf.stringToHclTerraform(struct!.host),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ hostname: {
+ value: cdktf.stringToHclTerraform(struct!.hostname),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ port: {
+ value: cdktf.numberToHclTerraform(struct!.port),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ protocol: {
+ value: cdktf.stringToHclTerraform(struct!.protocol),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksSqlWarehouseOdbcParamsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -341,6 +403,31 @@ export function dataDatabricksSqlWarehouseTagsCustomTagsToTerraform(struct?: Dat
}
}
+
+export function dataDatabricksSqlWarehouseTagsCustomTagsToHclTerraform(struct?: DataDatabricksSqlWarehouseTagsCustomTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksSqlWarehouseTagsCustomTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -456,6 +543,25 @@ export function dataDatabricksSqlWarehouseTagsToTerraform(struct?: DataDatabrick
}
}
+
+export function dataDatabricksSqlWarehouseTagsToHclTerraform(struct?: DataDatabricksSqlWarehouseTagsOutputReference | DataDatabricksSqlWarehouseTags): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ custom_tags: {
+ value: cdktf.listMapperHcl(dataDatabricksSqlWarehouseTagsCustomTagsToHclTerraform, true)(struct!.customTags),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksSqlWarehouseTagsCustomTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DataDatabricksSqlWarehouseTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -873,4 +979,114 @@ export class DataDatabricksSqlWarehouse extends cdktf.TerraformDataSource {
tags: dataDatabricksSqlWarehouseTagsToTerraform(this._tags.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ auto_stop_mins: {
+ value: cdktf.numberToHclTerraform(this._autoStopMins),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_size: {
+ value: cdktf.stringToHclTerraform(this._clusterSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_source_id: {
+ value: cdktf.stringToHclTerraform(this._dataSourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_photon: {
+ value: cdktf.booleanToHclTerraform(this._enablePhoton),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_serverless_compute: {
+ value: cdktf.booleanToHclTerraform(this._enableServerlessCompute),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jdbc_url: {
+ value: cdktf.stringToHclTerraform(this._jdbcUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_num_clusters: {
+ value: cdktf.numberToHclTerraform(this._maxNumClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_num_clusters: {
+ value: cdktf.numberToHclTerraform(this._minNumClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_clusters: {
+ value: cdktf.numberToHclTerraform(this._numClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_instance_policy: {
+ value: cdktf.stringToHclTerraform(this._spotInstancePolicy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(this._state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ channel: {
+ value: dataDatabricksSqlWarehouseChannelToHclTerraform(this._channel.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksSqlWarehouseChannelList",
+ },
+ odbc_params: {
+ value: dataDatabricksSqlWarehouseOdbcParamsToHclTerraform(this._odbcParams.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksSqlWarehouseOdbcParamsList",
+ },
+ tags: {
+ value: dataDatabricksSqlWarehouseTagsToHclTerraform(this._tags.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DataDatabricksSqlWarehouseTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-sql-warehouses/README.md b/src/data-databricks-sql-warehouses/README.md
index 31c28bd3d..5120c037e 100644
--- a/src/data-databricks-sql-warehouses/README.md
+++ b/src/data-databricks-sql-warehouses/README.md
@@ -1,3 +1,3 @@
# `data_databricks_sql_warehouses`
-Refer to the Terraform Registory for docs: [`data_databricks_sql_warehouses`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/sql_warehouses).
+Refer to the Terraform Registry for docs: [`data_databricks_sql_warehouses`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/sql_warehouses).
diff --git a/src/data-databricks-sql-warehouses/index.ts b/src/data-databricks-sql-warehouses/index.ts
index 67d7d49ae..34047ab7d 100644
--- a/src/data-databricks-sql-warehouses/index.ts
+++ b/src/data-databricks-sql-warehouses/index.ts
@@ -148,4 +148,30 @@ export class DataDatabricksSqlWarehouses extends cdktf.TerraformDataSource {
warehouse_name_contains: cdktf.stringToTerraform(this._warehouseNameContains),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ warehouse_name_contains: {
+ value: cdktf.stringToHclTerraform(this._warehouseNameContains),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-tables/README.md b/src/data-databricks-tables/README.md
index b345ac035..b65de9a72 100644
--- a/src/data-databricks-tables/README.md
+++ b/src/data-databricks-tables/README.md
@@ -1,3 +1,3 @@
# `data_databricks_tables`
-Refer to the Terraform Registory for docs: [`data_databricks_tables`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/tables).
+Refer to the Terraform Registry for docs: [`data_databricks_tables`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/tables).
diff --git a/src/data-databricks-tables/index.ts b/src/data-databricks-tables/index.ts
index 51feff045..7a2a7dcad 100644
--- a/src/data-databricks-tables/index.ts
+++ b/src/data-databricks-tables/index.ts
@@ -164,4 +164,36 @@ export class DataDatabricksTables extends cdktf.TerraformDataSource {
schema_name: cdktf.stringToTerraform(this._schemaName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-user/README.md b/src/data-databricks-user/README.md
index 0eb253d2d..d9ad40f45 100644
--- a/src/data-databricks-user/README.md
+++ b/src/data-databricks-user/README.md
@@ -1,3 +1,3 @@
# `data_databricks_user`
-Refer to the Terraform Registory for docs: [`data_databricks_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/user).
+Refer to the Terraform Registry for docs: [`data_databricks_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/user).
diff --git a/src/data-databricks-user/index.ts b/src/data-databricks-user/index.ts
index 74d2e810b..e7d3412c3 100644
--- a/src/data-databricks-user/index.ts
+++ b/src/data-databricks-user/index.ts
@@ -183,4 +183,30 @@ export class DataDatabricksUser extends cdktf.TerraformDataSource {
user_name: cdktf.stringToTerraform(this._userName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(this._userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(this._userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-views/README.md b/src/data-databricks-views/README.md
index 819d0cc3b..d0e644cf7 100644
--- a/src/data-databricks-views/README.md
+++ b/src/data-databricks-views/README.md
@@ -1,3 +1,3 @@
# `data_databricks_views`
-Refer to the Terraform Registory for docs: [`data_databricks_views`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/views).
+Refer to the Terraform Registry for docs: [`data_databricks_views`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/views).
diff --git a/src/data-databricks-views/index.ts b/src/data-databricks-views/index.ts
index dc968609b..75b19fade 100644
--- a/src/data-databricks-views/index.ts
+++ b/src/data-databricks-views/index.ts
@@ -164,4 +164,36 @@ export class DataDatabricksViews extends cdktf.TerraformDataSource {
schema_name: cdktf.stringToTerraform(this._schemaName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ids),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/data-databricks-zones/README.md b/src/data-databricks-zones/README.md
index 296681928..3d4198fad 100644
--- a/src/data-databricks-zones/README.md
+++ b/src/data-databricks-zones/README.md
@@ -1,3 +1,3 @@
# `data_databricks_zones`
-Refer to the Terraform Registory for docs: [`data_databricks_zones`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/zones).
+Refer to the Terraform Registry for docs: [`data_databricks_zones`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/data-sources/zones).
diff --git a/src/data-databricks-zones/index.ts b/src/data-databricks-zones/index.ts
index 545e898cf..2039beff1 100644
--- a/src/data-databricks-zones/index.ts
+++ b/src/data-databricks-zones/index.ts
@@ -114,4 +114,18 @@ export class DataDatabricksZones extends cdktf.TerraformDataSource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/dbfs-file/README.md b/src/dbfs-file/README.md
index e80f92eb0..c10c6b439 100644
--- a/src/dbfs-file/README.md
+++ b/src/dbfs-file/README.md
@@ -1,3 +1,3 @@
# `databricks_dbfs_file`
-Refer to the Terraform Registory for docs: [`databricks_dbfs_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/dbfs_file).
+Refer to the Terraform Registry for docs: [`databricks_dbfs_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/dbfs_file).
diff --git a/src/dbfs-file/index.ts b/src/dbfs-file/index.ts
index 840fd8ed7..d7a6b8a24 100644
--- a/src/dbfs-file/index.ts
+++ b/src/dbfs-file/index.ts
@@ -199,4 +199,42 @@ export class DbfsFile extends cdktf.TerraformResource {
source: cdktf.stringToTerraform(this._source),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ content_base64: {
+ value: cdktf.stringToHclTerraform(this._contentBase64),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ md5: {
+ value: cdktf.stringToHclTerraform(this._md5),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(this._source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/default-namespace-setting/README.md b/src/default-namespace-setting/README.md
index a1a88187b..221b38032 100644
--- a/src/default-namespace-setting/README.md
+++ b/src/default-namespace-setting/README.md
@@ -1,3 +1,3 @@
# `databricks_default_namespace_setting`
-Refer to the Terraform Registory for docs: [`databricks_default_namespace_setting`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/default_namespace_setting).
+Refer to the Terraform Registry for docs: [`databricks_default_namespace_setting`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/default_namespace_setting).
diff --git a/src/default-namespace-setting/index.ts b/src/default-namespace-setting/index.ts
index 204611e16..b6efd0544 100644
--- a/src/default-namespace-setting/index.ts
+++ b/src/default-namespace-setting/index.ts
@@ -51,6 +51,25 @@ export function defaultNamespaceSettingNamespaceToTerraform(struct?: DefaultName
}
}
+
+export function defaultNamespaceSettingNamespaceToHclTerraform(struct?: DefaultNamespaceSettingNamespaceOutputReference | DefaultNamespaceSettingNamespace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class DefaultNamespaceSettingNamespaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -234,4 +253,36 @@ export class DefaultNamespaceSetting extends cdktf.TerraformResource {
namespace: defaultNamespaceSettingNamespaceToTerraform(this._namespace.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ etag: {
+ value: cdktf.stringToHclTerraform(this._etag),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ setting_name: {
+ value: cdktf.stringToHclTerraform(this._settingName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ namespace: {
+ value: defaultNamespaceSettingNamespaceToHclTerraform(this._namespace.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "DefaultNamespaceSettingNamespaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/directory/README.md b/src/directory/README.md
index 5754eba4c..b1413c6b2 100644
--- a/src/directory/README.md
+++ b/src/directory/README.md
@@ -1,3 +1,3 @@
# `databricks_directory`
-Refer to the Terraform Registory for docs: [`databricks_directory`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/directory).
+Refer to the Terraform Registry for docs: [`databricks_directory`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/directory).
diff --git a/src/directory/index.ts b/src/directory/index.ts
index 7cd78387c..24938e01a 100644
--- a/src/directory/index.ts
+++ b/src/directory/index.ts
@@ -167,4 +167,36 @@ export class Directory extends cdktf.TerraformResource {
path: cdktf.stringToTerraform(this._path),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ delete_recursive: {
+ value: cdktf.booleanToHclTerraform(this._deleteRecursive),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_id: {
+ value: cdktf.numberToHclTerraform(this._objectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/entitlements/README.md b/src/entitlements/README.md
index f65ac1409..8db4594d3 100644
--- a/src/entitlements/README.md
+++ b/src/entitlements/README.md
@@ -1,3 +1,3 @@
# `databricks_entitlements`
-Refer to the Terraform Registory for docs: [`databricks_entitlements`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/entitlements).
+Refer to the Terraform Registry for docs: [`databricks_entitlements`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/entitlements).
diff --git a/src/entitlements/index.ts b/src/entitlements/index.ts
index 7e3a9b09b..f7400acb1 100644
--- a/src/entitlements/index.ts
+++ b/src/entitlements/index.ts
@@ -258,4 +258,60 @@ export class Entitlements extends cdktf.TerraformResource {
workspace_access: cdktf.booleanToTerraform(this._workspaceAccess),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ allow_cluster_create: {
+ value: cdktf.booleanToHclTerraform(this._allowClusterCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_instance_pool_create: {
+ value: cdktf.booleanToHclTerraform(this._allowInstancePoolCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ databricks_sql_access: {
+ value: cdktf.booleanToHclTerraform(this._databricksSqlAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ group_id: {
+ value: cdktf.stringToHclTerraform(this._groupId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_principal_id: {
+ value: cdktf.stringToHclTerraform(this._servicePrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(this._userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_access: {
+ value: cdktf.booleanToHclTerraform(this._workspaceAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/external-location/README.md b/src/external-location/README.md
index 6dea6d041..4f982bb0f 100644
--- a/src/external-location/README.md
+++ b/src/external-location/README.md
@@ -1,3 +1,3 @@
# `databricks_external_location`
-Refer to the Terraform Registory for docs: [`databricks_external_location`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/external_location).
+Refer to the Terraform Registry for docs: [`databricks_external_location`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/external_location).
diff --git a/src/external-location/index.ts b/src/external-location/index.ts
index 476227f74..fd8b37cb4 100644
--- a/src/external-location/index.ts
+++ b/src/external-location/index.ts
@@ -92,6 +92,31 @@ export function externalLocationEncryptionDetailsSseEncryptionDetailsToTerraform
}
}
+
+export function externalLocationEncryptionDetailsSseEncryptionDetailsToHclTerraform(struct?: ExternalLocationEncryptionDetailsSseEncryptionDetailsOutputReference | ExternalLocationEncryptionDetailsSseEncryptionDetails): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ algorithm: {
+ value: cdktf.stringToHclTerraform(struct!.algorithm),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_kms_key_arn: {
+ value: cdktf.stringToHclTerraform(struct!.awsKmsKeyArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ExternalLocationEncryptionDetailsSseEncryptionDetailsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -181,6 +206,25 @@ export function externalLocationEncryptionDetailsToTerraform(struct?: ExternalLo
}
}
+
+export function externalLocationEncryptionDetailsToHclTerraform(struct?: ExternalLocationEncryptionDetailsOutputReference | ExternalLocationEncryptionDetails): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ sse_encryption_details: {
+ value: externalLocationEncryptionDetailsSseEncryptionDetailsToHclTerraform(struct!.sseEncryptionDetails),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ExternalLocationEncryptionDetailsSseEncryptionDetailsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ExternalLocationEncryptionDetailsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -520,4 +564,90 @@ export class ExternalLocation extends cdktf.TerraformResource {
encryption_details: externalLocationEncryptionDetailsToTerraform(this._encryptionDetails.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ access_point: {
+ value: cdktf.stringToHclTerraform(this._accessPoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ credential_name: {
+ value: cdktf.stringToHclTerraform(this._credentialName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_update: {
+ value: cdktf.booleanToHclTerraform(this._forceUpdate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ read_only: {
+ value: cdktf.booleanToHclTerraform(this._readOnly),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ skip_validation: {
+ value: cdktf.booleanToHclTerraform(this._skipValidation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(this._url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ encryption_details: {
+ value: externalLocationEncryptionDetailsToHclTerraform(this._encryptionDetails.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ExternalLocationEncryptionDetailsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/git-credential/README.md b/src/git-credential/README.md
index fe47f3d5c..fc9be0b29 100644
--- a/src/git-credential/README.md
+++ b/src/git-credential/README.md
@@ -1,3 +1,3 @@
# `databricks_git_credential`
-Refer to the Terraform Registory for docs: [`databricks_git_credential`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/git_credential).
+Refer to the Terraform Registry for docs: [`databricks_git_credential`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/git_credential).
diff --git a/src/git-credential/index.ts b/src/git-credential/index.ts
index 7ecbffd9f..e1e3fff51 100644
--- a/src/git-credential/index.ts
+++ b/src/git-credential/index.ts
@@ -189,4 +189,42 @@ export class GitCredential extends cdktf.TerraformResource {
personal_access_token: cdktf.stringToTerraform(this._personalAccessToken),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ force: {
+ value: cdktf.booleanToHclTerraform(this._force),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ git_provider: {
+ value: cdktf.stringToHclTerraform(this._gitProvider),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ git_username: {
+ value: cdktf.stringToHclTerraform(this._gitUsername),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ personal_access_token: {
+ value: cdktf.stringToHclTerraform(this._personalAccessToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/global-init-script/README.md b/src/global-init-script/README.md
index 99cbb3c66..552ee2e2d 100644
--- a/src/global-init-script/README.md
+++ b/src/global-init-script/README.md
@@ -1,3 +1,3 @@
# `databricks_global_init_script`
-Refer to the Terraform Registory for docs: [`databricks_global_init_script`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/global_init_script).
+Refer to the Terraform Registry for docs: [`databricks_global_init_script`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/global_init_script).
diff --git a/src/global-init-script/index.ts b/src/global-init-script/index.ts
index f647f9714..404a8ea02 100644
--- a/src/global-init-script/index.ts
+++ b/src/global-init-script/index.ts
@@ -62,6 +62,17 @@ export function globalInitScriptTimeoutsToTerraform(struct?: GlobalInitScriptTim
}
}
+
+export function globalInitScriptTimeoutsToHclTerraform(struct?: GlobalInitScriptTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ };
+ return attrs;
+}
+
export class GlobalInitScriptTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -302,4 +313,60 @@ export class GlobalInitScript extends cdktf.TerraformResource {
timeouts: globalInitScriptTimeoutsToTerraform(this._timeouts.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ content_base64: {
+ value: cdktf.stringToHclTerraform(this._contentBase64),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enabled: {
+ value: cdktf.booleanToHclTerraform(this._enabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ md5: {
+ value: cdktf.stringToHclTerraform(this._md5),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ position: {
+ value: cdktf.numberToHclTerraform(this._position),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(this._source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timeouts: {
+ value: globalInitScriptTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "GlobalInitScriptTimeouts",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/grants/README.md b/src/grants/README.md
index 72b5c821a..28d8c63fd 100644
--- a/src/grants/README.md
+++ b/src/grants/README.md
@@ -1,3 +1,3 @@
# `databricks_grants`
-Refer to the Terraform Registory for docs: [`databricks_grants`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/grants).
+Refer to the Terraform Registry for docs: [`databricks_grants`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/grants).
diff --git a/src/grants/index.ts b/src/grants/index.ts
index f300af5a4..fde1e3d7e 100644
--- a/src/grants/index.ts
+++ b/src/grants/index.ts
@@ -100,6 +100,31 @@ export function grantsGrantToTerraform(struct?: GrantsGrant | cdktf.IResolvable)
}
}
+
+export function grantsGrantToHclTerraform(struct?: GrantsGrant | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ principal: {
+ value: cdktf.stringToHclTerraform(struct!.principal),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ privileges: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.privileges),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class GrantsGrantOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -529,4 +554,102 @@ export class Grants extends cdktf.TerraformResource {
grant: cdktf.listMapper(grantsGrantToTerraform, true)(this._grant.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog: {
+ value: cdktf.stringToHclTerraform(this._catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_location: {
+ value: cdktf.stringToHclTerraform(this._externalLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ foreign_connection: {
+ value: cdktf.stringToHclTerraform(this._foreignConnection),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ function: {
+ value: cdktf.stringToHclTerraform(this._function),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ materialized_view: {
+ value: cdktf.stringToHclTerraform(this._materializedView),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore: {
+ value: cdktf.stringToHclTerraform(this._metastore),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ model: {
+ value: cdktf.stringToHclTerraform(this._model),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(this._schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ share: {
+ value: cdktf.stringToHclTerraform(this._share),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_credential: {
+ value: cdktf.stringToHclTerraform(this._storageCredential),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ table: {
+ value: cdktf.stringToHclTerraform(this._table),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ view: {
+ value: cdktf.stringToHclTerraform(this._view),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ volume: {
+ value: cdktf.stringToHclTerraform(this._volume),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ grant: {
+ value: cdktf.listMapperHcl(grantsGrantToHclTerraform, true)(this._grant.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "GrantsGrantList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/group-instance-profile/README.md b/src/group-instance-profile/README.md
index 517de734d..8e1d736c6 100644
--- a/src/group-instance-profile/README.md
+++ b/src/group-instance-profile/README.md
@@ -1,3 +1,3 @@
# `databricks_group_instance_profile`
-Refer to the Terraform Registory for docs: [`databricks_group_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_instance_profile).
+Refer to the Terraform Registry for docs: [`databricks_group_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_instance_profile).
diff --git a/src/group-instance-profile/index.ts b/src/group-instance-profile/index.ts
index 16e8401c6..dc0989160 100644
--- a/src/group-instance-profile/index.ts
+++ b/src/group-instance-profile/index.ts
@@ -142,4 +142,30 @@ export class GroupInstanceProfile extends cdktf.TerraformResource {
instance_profile_id: cdktf.stringToTerraform(this._instanceProfileId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ group_id: {
+ value: cdktf.stringToHclTerraform(this._groupId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_id: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/group-member/README.md b/src/group-member/README.md
index e143cd202..619f18f6d 100644
--- a/src/group-member/README.md
+++ b/src/group-member/README.md
@@ -1,3 +1,3 @@
# `databricks_group_member`
-Refer to the Terraform Registory for docs: [`databricks_group_member`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_member).
+Refer to the Terraform Registry for docs: [`databricks_group_member`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_member).
diff --git a/src/group-member/index.ts b/src/group-member/index.ts
index e7ae07d04..c3e650686 100644
--- a/src/group-member/index.ts
+++ b/src/group-member/index.ts
@@ -142,4 +142,30 @@ export class GroupMember extends cdktf.TerraformResource {
member_id: cdktf.stringToTerraform(this._memberId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ group_id: {
+ value: cdktf.stringToHclTerraform(this._groupId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ member_id: {
+ value: cdktf.stringToHclTerraform(this._memberId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/group-role/README.md b/src/group-role/README.md
index 52bbbc4d9..d5c1294a0 100644
--- a/src/group-role/README.md
+++ b/src/group-role/README.md
@@ -1,3 +1,3 @@
# `databricks_group_role`
-Refer to the Terraform Registory for docs: [`databricks_group_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_role).
+Refer to the Terraform Registry for docs: [`databricks_group_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group_role).
diff --git a/src/group-role/index.ts b/src/group-role/index.ts
index 62fa59c73..98fce13fc 100644
--- a/src/group-role/index.ts
+++ b/src/group-role/index.ts
@@ -142,4 +142,30 @@ export class GroupRole extends cdktf.TerraformResource {
role: cdktf.stringToTerraform(this._role),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ group_id: {
+ value: cdktf.stringToHclTerraform(this._groupId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role: {
+ value: cdktf.stringToHclTerraform(this._role),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/group/README.md b/src/group/README.md
index fa702c5b4..1c71bf10f 100644
--- a/src/group/README.md
+++ b/src/group/README.md
@@ -1,3 +1,3 @@
# `databricks_group`
-Refer to the Terraform Registory for docs: [`databricks_group`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group).
+Refer to the Terraform Registry for docs: [`databricks_group`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/group).
diff --git a/src/group/index.ts b/src/group/index.ts
index 6b7d77fd1..ed56f34d1 100644
--- a/src/group/index.ts
+++ b/src/group/index.ts
@@ -299,4 +299,72 @@ export class Group extends cdktf.TerraformResource {
workspace_access: cdktf.booleanToTerraform(this._workspaceAccess),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ acl_principal_id: {
+ value: cdktf.stringToHclTerraform(this._aclPrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ allow_cluster_create: {
+ value: cdktf.booleanToHclTerraform(this._allowClusterCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_instance_pool_create: {
+ value: cdktf.booleanToHclTerraform(this._allowInstancePoolCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ databricks_sql_access: {
+ value: cdktf.booleanToHclTerraform(this._databricksSqlAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ display_name: {
+ value: cdktf.stringToHclTerraform(this._displayName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force: {
+ value: cdktf.booleanToHclTerraform(this._force),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(this._url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_access: {
+ value: cdktf.booleanToHclTerraform(this._workspaceAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/instance-pool/README.md b/src/instance-pool/README.md
index 7aae02323..f785d9b78 100644
--- a/src/instance-pool/README.md
+++ b/src/instance-pool/README.md
@@ -1,3 +1,3 @@
# `databricks_instance_pool`
-Refer to the Terraform Registory for docs: [`databricks_instance_pool`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/instance_pool).
+Refer to the Terraform Registry for docs: [`databricks_instance_pool`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/instance_pool).
diff --git a/src/instance-pool/index.ts b/src/instance-pool/index.ts
index 21f6d415f..c56c1cdf6 100644
--- a/src/instance-pool/index.ts
+++ b/src/instance-pool/index.ts
@@ -119,6 +119,37 @@ export function instancePoolAwsAttributesToTerraform(struct?: InstancePoolAwsAtt
}
}
+
+export function instancePoolAwsAttributesToHclTerraform(struct?: InstancePoolAwsAttributesOutputReference | InstancePoolAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -233,6 +264,31 @@ export function instancePoolAzureAttributesToTerraform(struct?: InstancePoolAzur
}
}
+
+export function instancePoolAzureAttributesToHclTerraform(struct?: InstancePoolAzureAttributesOutputReference | InstancePoolAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -325,6 +381,31 @@ export function instancePoolDiskSpecDiskTypeToTerraform(struct?: InstancePoolDis
}
}
+
+export function instancePoolDiskSpecDiskTypeToHclTerraform(struct?: InstancePoolDiskSpecDiskTypeOutputReference | InstancePoolDiskSpecDiskType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ azure_disk_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.azureDiskVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolDiskSpecDiskTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -424,6 +505,37 @@ export function instancePoolDiskSpecToTerraform(struct?: InstancePoolDiskSpecOut
}
}
+
+export function instancePoolDiskSpecToHclTerraform(struct?: InstancePoolDiskSpecOutputReference | InstancePoolDiskSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ disk_count: {
+ value: cdktf.numberToHclTerraform(struct!.diskCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.diskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ disk_type: {
+ value: instancePoolDiskSpecDiskTypeToHclTerraform(struct!.diskType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolDiskSpecDiskTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolDiskSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -538,6 +650,31 @@ export function instancePoolGcpAttributesToTerraform(struct?: InstancePoolGcpAtt
}
}
+
+export function instancePoolGcpAttributesToHclTerraform(struct?: InstancePoolGcpAttributesOutputReference | InstancePoolGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ gcp_availability: {
+ value: cdktf.stringToHclTerraform(struct!.gcpAvailability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -630,6 +767,31 @@ export function instancePoolInstancePoolFleetAttributesFleetOnDemandOptionToTerr
}
}
+
+export function instancePoolInstancePoolFleetAttributesFleetOnDemandOptionToHclTerraform(struct?: InstancePoolInstancePoolFleetAttributesFleetOnDemandOptionOutputReference | InstancePoolInstancePoolFleetAttributesFleetOnDemandOption): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ allocation_strategy: {
+ value: cdktf.stringToHclTerraform(struct!.allocationStrategy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pools_to_use_count: {
+ value: cdktf.numberToHclTerraform(struct!.instancePoolsToUseCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolInstancePoolFleetAttributesFleetOnDemandOptionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -719,6 +881,31 @@ export function instancePoolInstancePoolFleetAttributesFleetSpotOptionToTerrafor
}
}
+
+export function instancePoolInstancePoolFleetAttributesFleetSpotOptionToHclTerraform(struct?: InstancePoolInstancePoolFleetAttributesFleetSpotOptionOutputReference | InstancePoolInstancePoolFleetAttributesFleetSpotOption): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ allocation_strategy: {
+ value: cdktf.stringToHclTerraform(struct!.allocationStrategy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pools_to_use_count: {
+ value: cdktf.numberToHclTerraform(struct!.instancePoolsToUseCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolInstancePoolFleetAttributesFleetSpotOptionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -808,6 +995,31 @@ export function instancePoolInstancePoolFleetAttributesLaunchTemplateOverrideToT
}
}
+
+export function instancePoolInstancePoolFleetAttributesLaunchTemplateOverrideToHclTerraform(struct?: InstancePoolInstancePoolFleetAttributesLaunchTemplateOverride | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability_zone: {
+ value: cdktf.stringToHclTerraform(struct!.availabilityZone),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_type: {
+ value: cdktf.stringToHclTerraform(struct!.instanceType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -937,6 +1149,37 @@ export function instancePoolInstancePoolFleetAttributesToTerraform(struct?: Inst
}
}
+
+export function instancePoolInstancePoolFleetAttributesToHclTerraform(struct?: InstancePoolInstancePoolFleetAttributesOutputReference | InstancePoolInstancePoolFleetAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ fleet_on_demand_option: {
+ value: instancePoolInstancePoolFleetAttributesFleetOnDemandOptionToHclTerraform(struct!.fleetOnDemandOption),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolInstancePoolFleetAttributesFleetOnDemandOptionList",
+ },
+ fleet_spot_option: {
+ value: instancePoolInstancePoolFleetAttributesFleetSpotOptionToHclTerraform(struct!.fleetSpotOption),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolInstancePoolFleetAttributesFleetSpotOptionList",
+ },
+ launch_template_override: {
+ value: cdktf.listMapperHcl(instancePoolInstancePoolFleetAttributesLaunchTemplateOverrideToHclTerraform, true)(struct!.launchTemplateOverride),
+ isBlock: true,
+ type: "set",
+ storageClassType: "InstancePoolInstancePoolFleetAttributesLaunchTemplateOverrideList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolInstancePoolFleetAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1048,6 +1291,31 @@ export function instancePoolPreloadedDockerImageBasicAuthToTerraform(struct?: In
}
}
+
+export function instancePoolPreloadedDockerImageBasicAuthToHclTerraform(struct?: InstancePoolPreloadedDockerImageBasicAuthOutputReference | InstancePoolPreloadedDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolPreloadedDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1136,6 +1404,31 @@ export function instancePoolPreloadedDockerImageToTerraform(struct?: InstancePoo
}
}
+
+export function instancePoolPreloadedDockerImageToHclTerraform(struct?: InstancePoolPreloadedDockerImage | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: instancePoolPreloadedDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolPreloadedDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class InstancePoolPreloadedDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1583,4 +1876,108 @@ export class InstancePool extends cdktf.TerraformResource {
preloaded_docker_image: cdktf.listMapper(instancePoolPreloadedDockerImageToTerraform, true)(this._preloadedDockerImage.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(this._enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ idle_instance_autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(this._idleInstanceAutoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(this._instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_name: {
+ value: cdktf.stringToHclTerraform(this._instancePoolName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_capacity: {
+ value: cdktf.numberToHclTerraform(this._maxCapacity),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_idle_instances: {
+ value: cdktf.numberToHclTerraform(this._minIdleInstances),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(this._nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ preloaded_spark_versions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._preloadedSparkVersions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ aws_attributes: {
+ value: instancePoolAwsAttributesToHclTerraform(this._awsAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolAwsAttributesList",
+ },
+ azure_attributes: {
+ value: instancePoolAzureAttributesToHclTerraform(this._azureAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolAzureAttributesList",
+ },
+ disk_spec: {
+ value: instancePoolDiskSpecToHclTerraform(this._diskSpec.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolDiskSpecList",
+ },
+ gcp_attributes: {
+ value: instancePoolGcpAttributesToHclTerraform(this._gcpAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolGcpAttributesList",
+ },
+ instance_pool_fleet_attributes: {
+ value: instancePoolInstancePoolFleetAttributesToHclTerraform(this._instancePoolFleetAttributes.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "InstancePoolInstancePoolFleetAttributesList",
+ },
+ preloaded_docker_image: {
+ value: cdktf.listMapperHcl(instancePoolPreloadedDockerImageToHclTerraform, true)(this._preloadedDockerImage.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "InstancePoolPreloadedDockerImageList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/instance-profile/README.md b/src/instance-profile/README.md
index b22c81796..04090add0 100644
--- a/src/instance-profile/README.md
+++ b/src/instance-profile/README.md
@@ -1,3 +1,3 @@
# `databricks_instance_profile`
-Refer to the Terraform Registory for docs: [`databricks_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/instance_profile).
+Refer to the Terraform Registry for docs: [`databricks_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/instance_profile).
diff --git a/src/instance-profile/index.ts b/src/instance-profile/index.ts
index 019af8df0..487f6c4ee 100644
--- a/src/instance-profile/index.ts
+++ b/src/instance-profile/index.ts
@@ -189,4 +189,42 @@ export class InstanceProfile extends cdktf.TerraformResource {
skip_validation: cdktf.booleanToTerraform(this._skipValidation),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ iam_role_arn: {
+ value: cdktf.stringToHclTerraform(this._iamRoleArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_meta_instance_profile: {
+ value: cdktf.booleanToHclTerraform(this._isMetaInstanceProfile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ skip_validation: {
+ value: cdktf.booleanToHclTerraform(this._skipValidation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/ip-access-list/README.md b/src/ip-access-list/README.md
index be30ec1be..eba303193 100644
--- a/src/ip-access-list/README.md
+++ b/src/ip-access-list/README.md
@@ -1,3 +1,3 @@
# `databricks_ip_access_list`
-Refer to the Terraform Registory for docs: [`databricks_ip_access_list`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/ip_access_list).
+Refer to the Terraform Registry for docs: [`databricks_ip_access_list`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/ip_access_list).
diff --git a/src/ip-access-list/index.ts b/src/ip-access-list/index.ts
index 1180240f9..adaf2d1c0 100644
--- a/src/ip-access-list/index.ts
+++ b/src/ip-access-list/index.ts
@@ -183,4 +183,42 @@ export class IpAccessList extends cdktf.TerraformResource {
list_type: cdktf.stringToTerraform(this._listType),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ enabled: {
+ value: cdktf.booleanToHclTerraform(this._enabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ip_addresses: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._ipAddresses),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ label: {
+ value: cdktf.stringToHclTerraform(this._label),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ list_type: {
+ value: cdktf.stringToHclTerraform(this._listType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/job/README.md b/src/job/README.md
index 8ef669942..083bc526c 100644
--- a/src/job/README.md
+++ b/src/job/README.md
@@ -1,3 +1,3 @@
# `databricks_job`
-Refer to the Terraform Registory for docs: [`databricks_job`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/job).
+Refer to the Terraform Registry for docs: [`databricks_job`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/job).
diff --git a/src/job/index-structs/structs0.ts b/src/job/index-structs/structs0.ts
index 5380ab3a3..4903fccb4 100644
--- a/src/job/index-structs/structs0.ts
+++ b/src/job/index-structs/structs0.ts
@@ -21,6 +21,25 @@ export function jobComputeSpecToTerraform(struct?: JobComputeSpecOutputReference
}
}
+
+export function jobComputeSpecToHclTerraform(struct?: JobComputeSpecOutputReference | JobComputeSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ kind: {
+ value: cdktf.stringToHclTerraform(struct!.kind),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobComputeSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -93,6 +112,31 @@ export function jobComputeToTerraform(struct?: JobCompute | cdktf.IResolvable):
}
}
+
+export function jobComputeToHclTerraform(struct?: JobCompute | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ compute_key: {
+ value: cdktf.stringToHclTerraform(struct!.computeKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spec: {
+ value: jobComputeSpecToHclTerraform(struct!.spec),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobComputeSpecList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobComputeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -212,6 +256,25 @@ export function jobContinuousToTerraform(struct?: JobContinuousOutputReference |
}
}
+
+export function jobContinuousToHclTerraform(struct?: JobContinuousOutputReference | JobContinuous): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobContinuousOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -302,6 +365,55 @@ export function jobDbtTaskToTerraform(struct?: JobDbtTaskOutputReference | JobDb
}
}
+
+export function jobDbtTaskToHclTerraform(struct?: JobDbtTaskOutputReference | JobDbtTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ catalog: {
+ value: cdktf.stringToHclTerraform(struct!.catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commands: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.commands),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ profiles_directory: {
+ value: cdktf.stringToHclTerraform(struct!.profilesDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ project_directory: {
+ value: cdktf.stringToHclTerraform(struct!.projectDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(struct!.schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobDbtTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -479,6 +591,31 @@ export function jobDeploymentToTerraform(struct?: JobDeploymentOutputReference |
}
}
+
+export function jobDeploymentToHclTerraform(struct?: JobDeploymentOutputReference | JobDeployment): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ kind: {
+ value: cdktf.stringToHclTerraform(struct!.kind),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metadata_file_path: {
+ value: cdktf.stringToHclTerraform(struct!.metadataFilePath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobDeploymentOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -583,6 +720,49 @@ export function jobEmailNotificationsToTerraform(struct?: JobEmailNotificationsO
}
}
+
+export function jobEmailNotificationsToHclTerraform(struct?: JobEmailNotificationsOutputReference | JobEmailNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onFailure),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onStart),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onSuccess),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobEmailNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -746,6 +926,37 @@ export function jobGitSourceJobSourceToTerraform(struct?: JobGitSourceJobSourceO
}
}
+
+export function jobGitSourceJobSourceToHclTerraform(struct?: JobGitSourceJobSourceOutputReference | JobGitSourceJobSource): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dirty_state: {
+ value: cdktf.stringToHclTerraform(struct!.dirtyState),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ import_from_git_branch: {
+ value: cdktf.stringToHclTerraform(struct!.importFromGitBranch),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_config_path: {
+ value: cdktf.stringToHclTerraform(struct!.jobConfigPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobGitSourceJobSourceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -876,6 +1087,55 @@ export function jobGitSourceToTerraform(struct?: JobGitSourceOutputReference | J
}
}
+
+export function jobGitSourceToHclTerraform(struct?: JobGitSourceOutputReference | JobGitSource): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ branch: {
+ value: cdktf.stringToHclTerraform(struct!.branch),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commit: {
+ value: cdktf.stringToHclTerraform(struct!.commit),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ provider: {
+ value: cdktf.stringToHclTerraform(struct!.provider),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tag: {
+ value: cdktf.stringToHclTerraform(struct!.tag),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_source: {
+ value: jobGitSourceJobSourceToHclTerraform(struct!.jobSource),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobGitSourceJobSourceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobGitSourceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1058,6 +1318,37 @@ export function jobHealthRulesToTerraform(struct?: JobHealthRules | cdktf.IResol
}
}
+
+export function jobHealthRulesToHclTerraform(struct?: JobHealthRules | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ metric: {
+ value: cdktf.stringToHclTerraform(struct!.metric),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.numberToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobHealthRulesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1201,6 +1492,25 @@ export function jobHealthToTerraform(struct?: JobHealthOutputReference | JobHeal
}
}
+
+export function jobHealthToHclTerraform(struct?: JobHealthOutputReference | JobHealth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ rules: {
+ value: cdktf.listMapperHcl(jobHealthRulesToHclTerraform, true)(struct!.rules),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobHealthRulesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobHealthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1268,6 +1578,31 @@ export function jobJobClusterNewClusterAutoscaleToTerraform(struct?: JobJobClust
}
}
+
+export function jobJobClusterNewClusterAutoscaleToHclTerraform(struct?: JobJobClusterNewClusterAutoscaleOutputReference | JobJobClusterNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1390,6 +1725,67 @@ export function jobJobClusterNewClusterAwsAttributesToTerraform(struct?: JobJobC
}
}
+
+export function jobJobClusterNewClusterAwsAttributesToHclTerraform(struct?: JobJobClusterNewClusterAwsAttributesOutputReference | JobJobClusterNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1619,6 +2015,37 @@ export function jobJobClusterNewClusterAzureAttributesToTerraform(struct?: JobJo
}
}
+
+export function jobJobClusterNewClusterAzureAttributesToHclTerraform(struct?: JobJobClusterNewClusterAzureAttributesOutputReference | JobJobClusterNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1728,6 +2155,25 @@ export function jobJobClusterNewClusterClusterLogConfDbfsToTerraform(struct?: Jo
}
}
+
+export function jobJobClusterNewClusterClusterLogConfDbfsToHclTerraform(struct?: JobJobClusterNewClusterClusterLogConfDbfsOutputReference | JobJobClusterNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1820,6 +2266,61 @@ export function jobJobClusterNewClusterClusterLogConfS3ToTerraform(struct?: JobJ
}
}
+
+export function jobJobClusterNewClusterClusterLogConfS3ToHclTerraform(struct?: JobJobClusterNewClusterClusterLogConfS3OutputReference | JobJobClusterNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2023,6 +2524,31 @@ export function jobJobClusterNewClusterClusterLogConfToTerraform(struct?: JobJob
}
}
+
+export function jobJobClusterNewClusterClusterLogConfToHclTerraform(struct?: JobJobClusterNewClusterClusterLogConfOutputReference | JobJobClusterNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: jobJobClusterNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: jobJobClusterNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2115,6 +2641,31 @@ export function jobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoToTe
}
}
+
+export function jobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2211,6 +2762,37 @@ export function jobJobClusterNewClusterClusterMountInfoToTerraform(struct?: JobJ
}
}
+
+export function jobJobClusterNewClusterClusterMountInfoToHclTerraform(struct?: JobJobClusterNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: jobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2351,6 +2933,31 @@ export function jobJobClusterNewClusterDockerImageBasicAuthToTerraform(struct?:
}
}
+
+export function jobJobClusterNewClusterDockerImageBasicAuthToHclTerraform(struct?: JobJobClusterNewClusterDockerImageBasicAuthOutputReference | JobJobClusterNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2439,6 +3046,31 @@ export function jobJobClusterNewClusterDockerImageToTerraform(struct?: JobJobClu
}
}
+
+export function jobJobClusterNewClusterDockerImageToHclTerraform(struct?: JobJobClusterNewClusterDockerImageOutputReference | JobJobClusterNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: jobJobClusterNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2548,6 +3180,55 @@ export function jobJobClusterNewClusterGcpAttributesToTerraform(struct?: JobJobC
}
}
+
+export function jobJobClusterNewClusterGcpAttributesToHclTerraform(struct?: JobJobClusterNewClusterGcpAttributesOutputReference | JobJobClusterNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2723,6 +3404,25 @@ export function jobJobClusterNewClusterInitScriptsAbfssToTerraform(struct?: JobJ
}
}
+
+export function jobJobClusterNewClusterInitScriptsAbfssToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsAbfssOutputReference | JobJobClusterNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2788,6 +3488,25 @@ export function jobJobClusterNewClusterInitScriptsDbfsToTerraform(struct?: JobJo
}
}
+
+export function jobJobClusterNewClusterInitScriptsDbfsToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsDbfsOutputReference | JobJobClusterNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2850,6 +3569,25 @@ export function jobJobClusterNewClusterInitScriptsFileToTerraform(struct?: JobJo
}
}
+
+export function jobJobClusterNewClusterInitScriptsFileToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsFileOutputReference | JobJobClusterNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2915,6 +3653,25 @@ export function jobJobClusterNewClusterInitScriptsGcsToTerraform(struct?: JobJob
}
}
+
+export function jobJobClusterNewClusterInitScriptsGcsToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsGcsOutputReference | JobJobClusterNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3010,6 +3767,61 @@ export function jobJobClusterNewClusterInitScriptsS3ToTerraform(struct?: JobJobC
}
}
+
+export function jobJobClusterNewClusterInitScriptsS3ToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsS3OutputReference | JobJobClusterNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3204,6 +4016,25 @@ export function jobJobClusterNewClusterInitScriptsVolumesToTerraform(struct?: Jo
}
}
+
+export function jobJobClusterNewClusterInitScriptsVolumesToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsVolumesOutputReference | JobJobClusterNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3269,6 +4100,25 @@ export function jobJobClusterNewClusterInitScriptsWorkspaceToTerraform(struct?:
}
}
+
+export function jobJobClusterNewClusterInitScriptsWorkspaceToHclTerraform(struct?: JobJobClusterNewClusterInitScriptsWorkspaceOutputReference | JobJobClusterNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3378,6 +4228,61 @@ export function jobJobClusterNewClusterInitScriptsToTerraform(struct?: JobJobClu
}
}
+
+export function jobJobClusterNewClusterInitScriptsToHclTerraform(struct?: JobJobClusterNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: jobJobClusterNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: jobJobClusterNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: jobJobClusterNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: jobJobClusterNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: jobJobClusterNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: jobJobClusterNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: jobJobClusterNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3612,6 +4517,31 @@ export function jobJobClusterNewClusterWorkloadTypeClientsToTerraform(struct?: J
}
}
+
+export function jobJobClusterNewClusterWorkloadTypeClientsToHclTerraform(struct?: JobJobClusterNewClusterWorkloadTypeClientsOutputReference | JobJobClusterNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3701,6 +4631,25 @@ export function jobJobClusterNewClusterWorkloadTypeToTerraform(struct?: JobJobCl
}
}
+
+export function jobJobClusterNewClusterWorkloadTypeToHclTerraform(struct?: JobJobClusterNewClusterWorkloadTypeOutputReference | JobJobClusterNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: jobJobClusterNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -3926,6 +4875,199 @@ export function jobJobClusterNewClusterToTerraform(struct?: JobJobClusterNewClus
}
}
+
+export function jobJobClusterNewClusterToHclTerraform(struct?: JobJobClusterNewClusterOutputReference | JobJobClusterNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: jobJobClusterNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: jobJobClusterNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: jobJobClusterNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: jobJobClusterNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(jobJobClusterNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: jobJobClusterNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: jobJobClusterNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(jobJobClusterNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: jobJobClusterNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4633,6 +5775,31 @@ export function jobJobClusterToTerraform(struct?: JobJobCluster | cdktf.IResolva
}
}
+
+export function jobJobClusterToHclTerraform(struct?: JobJobCluster | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_cluster_key: {
+ value: cdktf.stringToHclTerraform(struct!.jobClusterKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ new_cluster: {
+ value: jobJobClusterNewClusterToHclTerraform(struct!.newCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterNewClusterList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobJobClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -4757,6 +5924,31 @@ export function jobLibraryCranToTerraform(struct?: JobLibraryCranOutputReference
}
}
+
+export function jobLibraryCranToHclTerraform(struct?: JobLibraryCranOutputReference | JobLibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobLibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4851,6 +6043,37 @@ export function jobLibraryMavenToTerraform(struct?: JobLibraryMavenOutputReferen
}
}
+
+export function jobLibraryMavenToHclTerraform(struct?: JobLibraryMavenOutputReference | JobLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -4962,6 +6185,31 @@ export function jobLibraryPypiToTerraform(struct?: JobLibraryPypiOutputReference
}
}
+
+export function jobLibraryPypiToHclTerraform(struct?: JobLibraryPypiOutputReference | JobLibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobLibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5077,6 +6325,55 @@ export function jobLibraryToTerraform(struct?: JobLibrary | cdktf.IResolvable):
}
}
+
+export function jobLibraryToHclTerraform(struct?: JobLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: jobLibraryCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobLibraryCranList",
+ },
+ maven: {
+ value: jobLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobLibraryMavenList",
+ },
+ pypi: {
+ value: jobLibraryPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobLibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -5289,6 +6586,31 @@ export function jobNewClusterAutoscaleToTerraform(struct?: JobNewClusterAutoscal
}
}
+
+export function jobNewClusterAutoscaleToHclTerraform(struct?: JobNewClusterAutoscaleOutputReference | JobNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5411,6 +6733,67 @@ export function jobNewClusterAwsAttributesToTerraform(struct?: JobNewClusterAwsA
}
}
+
+export function jobNewClusterAwsAttributesToHclTerraform(struct?: JobNewClusterAwsAttributesOutputReference | JobNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5640,6 +7023,37 @@ export function jobNewClusterAzureAttributesToTerraform(struct?: JobNewClusterAz
}
}
+
+export function jobNewClusterAzureAttributesToHclTerraform(struct?: JobNewClusterAzureAttributesOutputReference | JobNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5749,6 +7163,25 @@ export function jobNewClusterClusterLogConfDbfsToTerraform(struct?: JobNewCluste
}
}
+
+export function jobNewClusterClusterLogConfDbfsToHclTerraform(struct?: JobNewClusterClusterLogConfDbfsOutputReference | JobNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -5841,6 +7274,61 @@ export function jobNewClusterClusterLogConfS3ToTerraform(struct?: JobNewClusterC
}
}
+
+export function jobNewClusterClusterLogConfS3ToHclTerraform(struct?: JobNewClusterClusterLogConfS3OutputReference | JobNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6044,6 +7532,31 @@ export function jobNewClusterClusterLogConfToTerraform(struct?: JobNewClusterClu
}
}
+
+export function jobNewClusterClusterLogConfToHclTerraform(struct?: JobNewClusterClusterLogConfOutputReference | JobNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: jobNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: jobNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6136,6 +7649,31 @@ export function jobNewClusterClusterMountInfoNetworkFilesystemInfoToTerraform(st
}
}
+
+export function jobNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: JobNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | JobNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6232,6 +7770,37 @@ export function jobNewClusterClusterMountInfoToTerraform(struct?: JobNewClusterC
}
}
+
+export function jobNewClusterClusterMountInfoToHclTerraform(struct?: JobNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: jobNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -6372,6 +7941,31 @@ export function jobNewClusterDockerImageBasicAuthToTerraform(struct?: JobNewClus
}
}
+
+export function jobNewClusterDockerImageBasicAuthToHclTerraform(struct?: JobNewClusterDockerImageBasicAuthOutputReference | JobNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6460,6 +8054,31 @@ export function jobNewClusterDockerImageToTerraform(struct?: JobNewClusterDocker
}
}
+
+export function jobNewClusterDockerImageToHclTerraform(struct?: JobNewClusterDockerImageOutputReference | JobNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: jobNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6569,6 +8188,55 @@ export function jobNewClusterGcpAttributesToTerraform(struct?: JobNewClusterGcpA
}
}
+
+export function jobNewClusterGcpAttributesToHclTerraform(struct?: JobNewClusterGcpAttributesOutputReference | JobNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6744,6 +8412,25 @@ export function jobNewClusterInitScriptsAbfssToTerraform(struct?: JobNewClusterI
}
}
+
+export function jobNewClusterInitScriptsAbfssToHclTerraform(struct?: JobNewClusterInitScriptsAbfssOutputReference | JobNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6809,6 +8496,25 @@ export function jobNewClusterInitScriptsDbfsToTerraform(struct?: JobNewClusterIn
}
}
+
+export function jobNewClusterInitScriptsDbfsToHclTerraform(struct?: JobNewClusterInitScriptsDbfsOutputReference | JobNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6871,6 +8577,25 @@ export function jobNewClusterInitScriptsFileToTerraform(struct?: JobNewClusterIn
}
}
+
+export function jobNewClusterInitScriptsFileToHclTerraform(struct?: JobNewClusterInitScriptsFileOutputReference | JobNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -6936,6 +8661,25 @@ export function jobNewClusterInitScriptsGcsToTerraform(struct?: JobNewClusterIni
}
}
+
+export function jobNewClusterInitScriptsGcsToHclTerraform(struct?: JobNewClusterInitScriptsGcsOutputReference | JobNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7031,6 +8775,61 @@ export function jobNewClusterInitScriptsS3ToTerraform(struct?: JobNewClusterInit
}
}
+
+export function jobNewClusterInitScriptsS3ToHclTerraform(struct?: JobNewClusterInitScriptsS3OutputReference | JobNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7225,6 +9024,25 @@ export function jobNewClusterInitScriptsVolumesToTerraform(struct?: JobNewCluste
}
}
+
+export function jobNewClusterInitScriptsVolumesToHclTerraform(struct?: JobNewClusterInitScriptsVolumesOutputReference | JobNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7290,6 +9108,25 @@ export function jobNewClusterInitScriptsWorkspaceToTerraform(struct?: JobNewClus
}
}
+
+export function jobNewClusterInitScriptsWorkspaceToHclTerraform(struct?: JobNewClusterInitScriptsWorkspaceOutputReference | JobNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7399,6 +9236,61 @@ export function jobNewClusterInitScriptsToTerraform(struct?: JobNewClusterInitSc
}
}
+
+export function jobNewClusterInitScriptsToHclTerraform(struct?: JobNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: jobNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: jobNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: jobNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: jobNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: jobNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: jobNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: jobNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -7633,6 +9525,31 @@ export function jobNewClusterWorkloadTypeClientsToTerraform(struct?: JobNewClust
}
}
+
+export function jobNewClusterWorkloadTypeClientsToHclTerraform(struct?: JobNewClusterWorkloadTypeClientsOutputReference | JobNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7722,6 +9639,25 @@ export function jobNewClusterWorkloadTypeToTerraform(struct?: JobNewClusterWorkl
}
}
+
+export function jobNewClusterWorkloadTypeToHclTerraform(struct?: JobNewClusterWorkloadTypeOutputReference | JobNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: jobNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -7947,6 +9883,199 @@ export function jobNewClusterToTerraform(struct?: JobNewClusterOutputReference |
}
}
+
+export function jobNewClusterToHclTerraform(struct?: JobNewClusterOutputReference | JobNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: jobNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: jobNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: jobNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: jobNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(jobNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: jobNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: jobNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(jobNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: jobNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8657,6 +10786,37 @@ export function jobNotebookTaskToTerraform(struct?: JobNotebookTaskOutputReferen
}
}
+
+export function jobNotebookTaskToHclTerraform(struct?: JobNotebookTaskOutputReference | JobNotebookTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ base_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.baseParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ notebook_path: {
+ value: cdktf.stringToHclTerraform(struct!.notebookPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNotebookTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8768,6 +10928,31 @@ export function jobNotificationSettingsToTerraform(struct?: JobNotificationSetti
}
}
+
+export function jobNotificationSettingsToHclTerraform(struct?: JobNotificationSettingsOutputReference | JobNotificationSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ no_alert_for_canceled_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForCanceledRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobNotificationSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -8860,6 +11045,31 @@ export function jobParameterToTerraform(struct?: JobParameter | cdktf.IResolvabl
}
}
+
+export function jobParameterToHclTerraform(struct?: JobParameter | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ default: {
+ value: cdktf.stringToHclTerraform(struct!.default),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobParameterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -8984,6 +11194,31 @@ export function jobPipelineTaskToTerraform(struct?: JobPipelineTaskOutputReferen
}
}
+
+export function jobPipelineTaskToHclTerraform(struct?: JobPipelineTaskOutputReference | JobPipelineTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ full_refresh: {
+ value: cdktf.booleanToHclTerraform(struct!.fullRefresh),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ pipeline_id: {
+ value: cdktf.stringToHclTerraform(struct!.pipelineId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobPipelineTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9083,6 +11318,43 @@ export function jobPythonWheelTaskToTerraform(struct?: JobPythonWheelTaskOutputR
}
}
+
+export function jobPythonWheelTaskToHclTerraform(struct?: JobPythonWheelTaskOutputReference | JobPythonWheelTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ entry_point: {
+ value: cdktf.stringToHclTerraform(struct!.entryPoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ named_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.namedParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ package_name: {
+ value: cdktf.stringToHclTerraform(struct!.packageName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobPythonWheelTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9214,6 +11486,25 @@ export function jobQueueToTerraform(struct?: JobQueueOutputReference | JobQueue)
}
}
+
+export function jobQueueToHclTerraform(struct?: JobQueueOutputReference | JobQueue): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.enabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobQueueOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9281,6 +11572,31 @@ export function jobRunAsToTerraform(struct?: JobRunAsOutputReference | JobRunAs)
}
}
+
+export function jobRunAsToHclTerraform(struct?: JobRunAsOutputReference | JobRunAs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ service_principal_name: {
+ value: cdktf.stringToHclTerraform(struct!.servicePrincipalName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobRunAsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9373,6 +11689,31 @@ export function jobRunJobTaskToTerraform(struct?: JobRunJobTaskOutputReference |
}
}
+
+export function jobRunJobTaskToHclTerraform(struct?: JobRunJobTaskOutputReference | JobRunJobTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_id: {
+ value: cdktf.numberToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ job_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.jobParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobRunJobTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9467,6 +11808,37 @@ export function jobScheduleToTerraform(struct?: JobScheduleOutputReference | Job
}
}
+
+export function jobScheduleToHclTerraform(struct?: JobScheduleOutputReference | JobSchedule): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ quartz_cron_expression: {
+ value: cdktf.stringToHclTerraform(struct!.quartzCronExpression),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timezone_id: {
+ value: cdktf.stringToHclTerraform(struct!.timezoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobScheduleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9580,6 +11952,37 @@ export function jobSparkJarTaskToTerraform(struct?: JobSparkJarTaskOutputReferen
}
}
+
+export function jobSparkJarTaskToHclTerraform(struct?: JobSparkJarTaskOutputReference | JobSparkJarTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jar_uri: {
+ value: cdktf.stringToHclTerraform(struct!.jarUri),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ main_class_name: {
+ value: cdktf.stringToHclTerraform(struct!.mainClassName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobSparkJarTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9699,6 +12102,37 @@ export function jobSparkPythonTaskToTerraform(struct?: JobSparkPythonTaskOutputR
}
}
+
+export function jobSparkPythonTaskToHclTerraform(struct?: JobSparkPythonTaskOutputReference | JobSparkPythonTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ python_file: {
+ value: cdktf.stringToHclTerraform(struct!.pythonFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobSparkPythonTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9805,6 +12239,25 @@ export function jobSparkSubmitTaskToTerraform(struct?: JobSparkSubmitTaskOutputR
}
}
+
+export function jobSparkSubmitTaskToHclTerraform(struct?: JobSparkSubmitTaskOutputReference | JobSparkSubmitTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobSparkSubmitTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -9880,6 +12333,37 @@ export function jobTaskConditionTaskToTerraform(struct?: JobTaskConditionTaskOut
}
}
+
+export function jobTaskConditionTaskToHclTerraform(struct?: JobTaskConditionTaskOutputReference | JobTaskConditionTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ left: {
+ value: cdktf.stringToHclTerraform(struct!.left),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ right: {
+ value: cdktf.stringToHclTerraform(struct!.right),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskConditionTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10014,6 +12498,55 @@ export function jobTaskDbtTaskToTerraform(struct?: JobTaskDbtTaskOutputReference
}
}
+
+export function jobTaskDbtTaskToHclTerraform(struct?: JobTaskDbtTaskOutputReference | JobTaskDbtTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ catalog: {
+ value: cdktf.stringToHclTerraform(struct!.catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commands: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.commands),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ profiles_directory: {
+ value: cdktf.stringToHclTerraform(struct!.profilesDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ project_directory: {
+ value: cdktf.stringToHclTerraform(struct!.projectDirectory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(struct!.schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskDbtTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10191,6 +12724,31 @@ export function jobTaskDependsOnToTerraform(struct?: JobTaskDependsOn | cdktf.IR
}
}
+
+export function jobTaskDependsOnToHclTerraform(struct?: JobTaskDependsOn | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ outcome: {
+ value: cdktf.stringToHclTerraform(struct!.outcome),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ task_key: {
+ value: cdktf.stringToHclTerraform(struct!.taskKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskDependsOnOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -10322,6 +12880,43 @@ export function jobTaskEmailNotificationsToTerraform(struct?: JobTaskEmailNotifi
}
}
+
+export function jobTaskEmailNotificationsToHclTerraform(struct?: JobTaskEmailNotificationsOutputReference | JobTaskEmailNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onFailure),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onStart),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.onSuccess),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskEmailNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10463,6 +13058,37 @@ export function jobTaskHealthRulesToTerraform(struct?: JobTaskHealthRules | cdkt
}
}
+
+export function jobTaskHealthRulesToHclTerraform(struct?: JobTaskHealthRules | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ metric: {
+ value: cdktf.stringToHclTerraform(struct!.metric),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.numberToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskHealthRulesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -10606,6 +13232,25 @@ export function jobTaskHealthToTerraform(struct?: JobTaskHealthOutputReference |
}
}
+
+export function jobTaskHealthToHclTerraform(struct?: JobTaskHealthOutputReference | JobTaskHealth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ rules: {
+ value: cdktf.listMapperHcl(jobTaskHealthRulesToHclTerraform, true)(struct!.rules),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskHealthRulesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskHealthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10673,6 +13318,31 @@ export function jobTaskLibraryCranToTerraform(struct?: JobTaskLibraryCranOutputR
}
}
+
+export function jobTaskLibraryCranToHclTerraform(struct?: JobTaskLibraryCranOutputReference | JobTaskLibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskLibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10767,6 +13437,37 @@ export function jobTaskLibraryMavenToTerraform(struct?: JobTaskLibraryMavenOutpu
}
}
+
+export function jobTaskLibraryMavenToHclTerraform(struct?: JobTaskLibraryMavenOutputReference | JobTaskLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10878,6 +13579,31 @@ export function jobTaskLibraryPypiToTerraform(struct?: JobTaskLibraryPypiOutputR
}
}
+
+export function jobTaskLibraryPypiToHclTerraform(struct?: JobTaskLibraryPypiOutputReference | JobTaskLibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskLibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -10993,6 +13719,55 @@ export function jobTaskLibraryToTerraform(struct?: JobTaskLibrary | cdktf.IResol
}
}
+
+export function jobTaskLibraryToHclTerraform(struct?: JobTaskLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ egg: {
+ value: cdktf.stringToHclTerraform(struct!.egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: jobTaskLibraryCranToHclTerraform(struct!.cran),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskLibraryCranList",
+ },
+ maven: {
+ value: jobTaskLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskLibraryMavenList",
+ },
+ pypi: {
+ value: jobTaskLibraryPypiToHclTerraform(struct!.pypi),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskLibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -11205,6 +13980,31 @@ export function jobTaskNewClusterAutoscaleToTerraform(struct?: JobTaskNewCluster
}
}
+
+export function jobTaskNewClusterAutoscaleToHclTerraform(struct?: JobTaskNewClusterAutoscaleOutputReference | JobTaskNewClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11327,6 +14127,67 @@ export function jobTaskNewClusterAwsAttributesToTerraform(struct?: JobTaskNewClu
}
}
+
+export function jobTaskNewClusterAwsAttributesToHclTerraform(struct?: JobTaskNewClusterAwsAttributesOutputReference | JobTaskNewClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11556,6 +14417,37 @@ export function jobTaskNewClusterAzureAttributesToTerraform(struct?: JobTaskNewC
}
}
+
+export function jobTaskNewClusterAzureAttributesToHclTerraform(struct?: JobTaskNewClusterAzureAttributesOutputReference | JobTaskNewClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11665,6 +14557,25 @@ export function jobTaskNewClusterClusterLogConfDbfsToTerraform(struct?: JobTaskN
}
}
+
+export function jobTaskNewClusterClusterLogConfDbfsToHclTerraform(struct?: JobTaskNewClusterClusterLogConfDbfsOutputReference | JobTaskNewClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11757,6 +14668,61 @@ export function jobTaskNewClusterClusterLogConfS3ToTerraform(struct?: JobTaskNew
}
}
+
+export function jobTaskNewClusterClusterLogConfS3ToHclTerraform(struct?: JobTaskNewClusterClusterLogConfS3OutputReference | JobTaskNewClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -11960,6 +14926,31 @@ export function jobTaskNewClusterClusterLogConfToTerraform(struct?: JobTaskNewCl
}
}
+
+export function jobTaskNewClusterClusterLogConfToHclTerraform(struct?: JobTaskNewClusterClusterLogConfOutputReference | JobTaskNewClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: jobTaskNewClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: jobTaskNewClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12052,6 +15043,31 @@ export function jobTaskNewClusterClusterMountInfoNetworkFilesystemInfoToTerrafor
}
}
+
+export function jobTaskNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct?: JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference | JobTaskNewClusterClusterMountInfoNetworkFilesystemInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ mount_options: {
+ value: cdktf.stringToHclTerraform(struct!.mountOptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ server_address: {
+ value: cdktf.stringToHclTerraform(struct!.serverAddress),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12148,6 +15164,37 @@ export function jobTaskNewClusterClusterMountInfoToTerraform(struct?: JobTaskNew
}
}
+
+export function jobTaskNewClusterClusterMountInfoToHclTerraform(struct?: JobTaskNewClusterClusterMountInfo | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ local_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.localMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ remote_mount_dir_path: {
+ value: cdktf.stringToHclTerraform(struct!.remoteMountDirPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_filesystem_info: {
+ value: jobTaskNewClusterClusterMountInfoNetworkFilesystemInfoToHclTerraform(struct!.networkFilesystemInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterClusterMountInfoNetworkFilesystemInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterClusterMountInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -12288,6 +15335,31 @@ export function jobTaskNewClusterDockerImageBasicAuthToTerraform(struct?: JobTas
}
}
+
+export function jobTaskNewClusterDockerImageBasicAuthToHclTerraform(struct?: JobTaskNewClusterDockerImageBasicAuthOutputReference | JobTaskNewClusterDockerImageBasicAuth): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ password: {
+ value: cdktf.stringToHclTerraform(struct!.password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(struct!.username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterDockerImageBasicAuthOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12376,6 +15448,31 @@ export function jobTaskNewClusterDockerImageToTerraform(struct?: JobTaskNewClust
}
}
+
+export function jobTaskNewClusterDockerImageToHclTerraform(struct?: JobTaskNewClusterDockerImageOutputReference | JobTaskNewClusterDockerImage): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ basic_auth: {
+ value: jobTaskNewClusterDockerImageBasicAuthToHclTerraform(struct!.basicAuth),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterDockerImageBasicAuthList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterDockerImageOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12485,6 +15582,55 @@ export function jobTaskNewClusterGcpAttributesToTerraform(struct?: JobTaskNewClu
}
}
+
+export function jobTaskNewClusterGcpAttributesToHclTerraform(struct?: JobTaskNewClusterGcpAttributesOutputReference | JobTaskNewClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ boot_disk_size: {
+ value: cdktf.numberToHclTerraform(struct!.bootDiskSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ use_preemptible_executors: {
+ value: cdktf.booleanToHclTerraform(struct!.usePreemptibleExecutors),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12660,6 +15806,25 @@ export function jobTaskNewClusterInitScriptsAbfssToTerraform(struct?: JobTaskNew
}
}
+
+export function jobTaskNewClusterInitScriptsAbfssToHclTerraform(struct?: JobTaskNewClusterInitScriptsAbfssOutputReference | JobTaskNewClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12725,6 +15890,25 @@ export function jobTaskNewClusterInitScriptsDbfsToTerraform(struct?: JobTaskNewC
}
}
+
+export function jobTaskNewClusterInitScriptsDbfsToHclTerraform(struct?: JobTaskNewClusterInitScriptsDbfsOutputReference | JobTaskNewClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12787,6 +15971,25 @@ export function jobTaskNewClusterInitScriptsFileToTerraform(struct?: JobTaskNewC
}
}
+
+export function jobTaskNewClusterInitScriptsFileToHclTerraform(struct?: JobTaskNewClusterInitScriptsFileOutputReference | JobTaskNewClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12852,6 +16055,25 @@ export function jobTaskNewClusterInitScriptsGcsToTerraform(struct?: JobTaskNewCl
}
}
+
+export function jobTaskNewClusterInitScriptsGcsToHclTerraform(struct?: JobTaskNewClusterInitScriptsGcsOutputReference | JobTaskNewClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -12947,6 +16169,61 @@ export function jobTaskNewClusterInitScriptsS3ToTerraform(struct?: JobTaskNewClu
}
}
+
+export function jobTaskNewClusterInitScriptsS3ToHclTerraform(struct?: JobTaskNewClusterInitScriptsS3OutputReference | JobTaskNewClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13141,6 +16418,25 @@ export function jobTaskNewClusterInitScriptsVolumesToTerraform(struct?: JobTaskN
}
}
+
+export function jobTaskNewClusterInitScriptsVolumesToHclTerraform(struct?: JobTaskNewClusterInitScriptsVolumesOutputReference | JobTaskNewClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13206,6 +16502,25 @@ export function jobTaskNewClusterInitScriptsWorkspaceToTerraform(struct?: JobTas
}
}
+
+export function jobTaskNewClusterInitScriptsWorkspaceToHclTerraform(struct?: JobTaskNewClusterInitScriptsWorkspaceOutputReference | JobTaskNewClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13315,6 +16630,61 @@ export function jobTaskNewClusterInitScriptsToTerraform(struct?: JobTaskNewClust
}
}
+
+export function jobTaskNewClusterInitScriptsToHclTerraform(struct?: JobTaskNewClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: jobTaskNewClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: jobTaskNewClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: jobTaskNewClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: jobTaskNewClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: jobTaskNewClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: jobTaskNewClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: jobTaskNewClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -13549,6 +16919,31 @@ export function jobTaskNewClusterWorkloadTypeClientsToTerraform(struct?: JobTask
}
}
+
+export function jobTaskNewClusterWorkloadTypeClientsToHclTerraform(struct?: JobTaskNewClusterWorkloadTypeClientsOutputReference | JobTaskNewClusterWorkloadTypeClients): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jobs: {
+ value: cdktf.booleanToHclTerraform(struct!.jobs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ notebooks: {
+ value: cdktf.booleanToHclTerraform(struct!.notebooks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterWorkloadTypeClientsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13638,6 +17033,25 @@ export function jobTaskNewClusterWorkloadTypeToTerraform(struct?: JobTaskNewClus
}
}
+
+export function jobTaskNewClusterWorkloadTypeToHclTerraform(struct?: JobTaskNewClusterWorkloadTypeOutputReference | JobTaskNewClusterWorkloadType): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ clients: {
+ value: jobTaskNewClusterWorkloadTypeClientsToHclTerraform(struct!.clients),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterWorkloadTypeClientsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterWorkloadTypeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -13863,6 +17277,199 @@ export function jobTaskNewClusterToTerraform(struct?: JobTaskNewClusterOutputRef
}
}
+
+export function jobTaskNewClusterToHclTerraform(struct?: JobTaskNewClusterOutputReference | JobTaskNewCluster): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ autotermination_minutes: {
+ value: cdktf.numberToHclTerraform(struct!.autoterminationMinutes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_name: {
+ value: cdktf.stringToHclTerraform(struct!.clusterName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ data_security_mode: {
+ value: cdktf.stringToHclTerraform(struct!.dataSecurityMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_elastic_disk: {
+ value: cdktf.booleanToHclTerraform(struct!.enableElasticDisk),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ idempotency_token: {
+ value: cdktf.stringToHclTerraform(struct!.idempotencyToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ runtime_engine: {
+ value: cdktf.stringToHclTerraform(struct!.runtimeEngine),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ single_user_name: {
+ value: cdktf.stringToHclTerraform(struct!.singleUserName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_version: {
+ value: cdktf.stringToHclTerraform(struct!.sparkVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: jobTaskNewClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: jobTaskNewClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: jobTaskNewClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: jobTaskNewClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterClusterLogConfList",
+ },
+ cluster_mount_info: {
+ value: cdktf.listMapperHcl(jobTaskNewClusterClusterMountInfoToHclTerraform, true)(struct!.clusterMountInfo),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterClusterMountInfoList",
+ },
+ docker_image: {
+ value: jobTaskNewClusterDockerImageToHclTerraform(struct!.dockerImage),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterDockerImageList",
+ },
+ gcp_attributes: {
+ value: jobTaskNewClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(jobTaskNewClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterInitScriptsList",
+ },
+ workload_type: {
+ value: jobTaskNewClusterWorkloadTypeToHclTerraform(struct!.workloadType),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterWorkloadTypeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNewClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14573,6 +18180,37 @@ export function jobTaskNotebookTaskToTerraform(struct?: JobTaskNotebookTaskOutpu
}
}
+
+export function jobTaskNotebookTaskToHclTerraform(struct?: JobTaskNotebookTaskOutputReference | JobTaskNotebookTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ base_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.baseParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ notebook_path: {
+ value: cdktf.stringToHclTerraform(struct!.notebookPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNotebookTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14689,6 +18327,37 @@ export function jobTaskNotificationSettingsToTerraform(struct?: JobTaskNotificat
}
}
+
+export function jobTaskNotificationSettingsToHclTerraform(struct?: JobTaskNotificationSettingsOutputReference | JobTaskNotificationSettings): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ alert_on_last_attempt: {
+ value: cdktf.booleanToHclTerraform(struct!.alertOnLastAttempt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_canceled_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForCanceledRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ no_alert_for_skipped_runs: {
+ value: cdktf.booleanToHclTerraform(struct!.noAlertForSkippedRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskNotificationSettingsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14803,6 +18472,31 @@ export function jobTaskPipelineTaskToTerraform(struct?: JobTaskPipelineTaskOutpu
}
}
+
+export function jobTaskPipelineTaskToHclTerraform(struct?: JobTaskPipelineTaskOutputReference | JobTaskPipelineTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ full_refresh: {
+ value: cdktf.booleanToHclTerraform(struct!.fullRefresh),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ pipeline_id: {
+ value: cdktf.stringToHclTerraform(struct!.pipelineId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskPipelineTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -14902,6 +18596,43 @@ export function jobTaskPythonWheelTaskToTerraform(struct?: JobTaskPythonWheelTas
}
}
+
+export function jobTaskPythonWheelTaskToHclTerraform(struct?: JobTaskPythonWheelTaskOutputReference | JobTaskPythonWheelTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ entry_point: {
+ value: cdktf.stringToHclTerraform(struct!.entryPoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ named_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.namedParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ package_name: {
+ value: cdktf.stringToHclTerraform(struct!.packageName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskPythonWheelTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15038,6 +18769,31 @@ export function jobTaskRunJobTaskToTerraform(struct?: JobTaskRunJobTaskOutputRef
}
}
+
+export function jobTaskRunJobTaskToHclTerraform(struct?: JobTaskRunJobTaskOutputReference | JobTaskRunJobTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ job_id: {
+ value: cdktf.numberToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ job_parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.jobParameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskRunJobTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15132,6 +18888,37 @@ export function jobTaskSparkJarTaskToTerraform(struct?: JobTaskSparkJarTaskOutpu
}
}
+
+export function jobTaskSparkJarTaskToHclTerraform(struct?: JobTaskSparkJarTaskOutputReference | JobTaskSparkJarTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jar_uri: {
+ value: cdktf.stringToHclTerraform(struct!.jarUri),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ main_class_name: {
+ value: cdktf.stringToHclTerraform(struct!.mainClassName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSparkJarTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15251,6 +19038,37 @@ export function jobTaskSparkPythonTaskToTerraform(struct?: JobTaskSparkPythonTas
}
}
+
+export function jobTaskSparkPythonTaskToHclTerraform(struct?: JobTaskSparkPythonTaskOutputReference | JobTaskSparkPythonTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ python_file: {
+ value: cdktf.stringToHclTerraform(struct!.pythonFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(struct!.source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSparkPythonTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15357,6 +19175,25 @@ export function jobTaskSparkSubmitTaskToTerraform(struct?: JobTaskSparkSubmitTas
}
}
+
+export function jobTaskSparkSubmitTaskToHclTerraform(struct?: JobTaskSparkSubmitTaskOutputReference | JobTaskSparkSubmitTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.parameters),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSparkSubmitTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15427,6 +19264,31 @@ export function jobTaskSqlTaskAlertSubscriptionsToTerraform(struct?: JobTaskSqlT
}
}
+
+export function jobTaskSqlTaskAlertSubscriptionsToHclTerraform(struct?: JobTaskSqlTaskAlertSubscriptions | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination_id: {
+ value: cdktf.stringToHclTerraform(struct!.destinationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskAlertSubscriptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -15558,6 +19420,37 @@ export function jobTaskSqlTaskAlertToTerraform(struct?: JobTaskSqlTaskAlertOutpu
}
}
+
+export function jobTaskSqlTaskAlertToHclTerraform(struct?: JobTaskSqlTaskAlertOutputReference | JobTaskSqlTaskAlert): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ alert_id: {
+ value: cdktf.stringToHclTerraform(struct!.alertId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pause_subscriptions: {
+ value: cdktf.booleanToHclTerraform(struct!.pauseSubscriptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ subscriptions: {
+ value: cdktf.listMapperHcl(jobTaskSqlTaskAlertSubscriptionsToHclTerraform, true)(struct!.subscriptions),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskAlertSubscriptionsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskAlertOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15666,6 +19559,31 @@ export function jobTaskSqlTaskDashboardSubscriptionsToTerraform(struct?: JobTask
}
}
+
+export function jobTaskSqlTaskDashboardSubscriptionsToHclTerraform(struct?: JobTaskSqlTaskDashboardSubscriptions | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination_id: {
+ value: cdktf.stringToHclTerraform(struct!.destinationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskDashboardSubscriptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -15802,6 +19720,43 @@ export function jobTaskSqlTaskDashboardToTerraform(struct?: JobTaskSqlTaskDashbo
}
}
+
+export function jobTaskSqlTaskDashboardToHclTerraform(struct?: JobTaskSqlTaskDashboardOutputReference | JobTaskSqlTaskDashboard): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ custom_subject: {
+ value: cdktf.stringToHclTerraform(struct!.customSubject),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ dashboard_id: {
+ value: cdktf.stringToHclTerraform(struct!.dashboardId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pause_subscriptions: {
+ value: cdktf.booleanToHclTerraform(struct!.pauseSubscriptions),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ subscriptions: {
+ value: cdktf.listMapperHcl(jobTaskSqlTaskDashboardSubscriptionsToHclTerraform, true)(struct!.subscriptions),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskDashboardSubscriptionsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskDashboardOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15930,6 +19885,25 @@ export function jobTaskSqlTaskFileToTerraform(struct?: JobTaskSqlTaskFileOutputR
}
}
+
+export function jobTaskSqlTaskFileToHclTerraform(struct?: JobTaskSqlTaskFileOutputReference | JobTaskSqlTaskFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -15992,6 +19966,25 @@ export function jobTaskSqlTaskQueryToTerraform(struct?: JobTaskSqlTaskQueryOutpu
}
}
+
+export function jobTaskSqlTaskQueryToHclTerraform(struct?: JobTaskSqlTaskQueryOutputReference | JobTaskSqlTaskQuery): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ query_id: {
+ value: cdktf.stringToHclTerraform(struct!.queryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskQueryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16087,6 +20080,55 @@ export function jobTaskSqlTaskToTerraform(struct?: JobTaskSqlTaskOutputReference
}
}
+
+export function jobTaskSqlTaskToHclTerraform(struct?: JobTaskSqlTaskOutputReference | JobTaskSqlTask): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ parameters: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.parameters),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(struct!.warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ alert: {
+ value: jobTaskSqlTaskAlertToHclTerraform(struct!.alert),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskAlertList",
+ },
+ dashboard: {
+ value: jobTaskSqlTaskDashboardToHclTerraform(struct!.dashboard),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskDashboardList",
+ },
+ file: {
+ value: jobTaskSqlTaskFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskFileList",
+ },
+ query: {
+ value: jobTaskSqlTaskQueryToHclTerraform(struct!.query),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskQueryList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskSqlTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16265,6 +20307,25 @@ export function jobTaskWebhookNotificationsOnDurationWarningThresholdExceededToT
}
}
+
+export function jobTaskWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform(struct?: JobTaskWebhookNotificationsOnDurationWarningThresholdExceeded | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskWebhookNotificationsOnDurationWarningThresholdExceededOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16365,6 +20426,25 @@ export function jobTaskWebhookNotificationsOnFailureToTerraform(struct?: JobTask
}
}
+
+export function jobTaskWebhookNotificationsOnFailureToHclTerraform(struct?: JobTaskWebhookNotificationsOnFailure | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskWebhookNotificationsOnFailureOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16465,6 +20545,25 @@ export function jobTaskWebhookNotificationsOnStartToTerraform(struct?: JobTaskWe
}
}
+
+export function jobTaskWebhookNotificationsOnStartToHclTerraform(struct?: JobTaskWebhookNotificationsOnStart | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskWebhookNotificationsOnStartOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16565,6 +20664,25 @@ export function jobTaskWebhookNotificationsOnSuccessToTerraform(struct?: JobTask
}
}
+
+export function jobTaskWebhookNotificationsOnSuccessToHclTerraform(struct?: JobTaskWebhookNotificationsOnSuccess | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskWebhookNotificationsOnSuccessOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -16685,6 +20803,43 @@ export function jobTaskWebhookNotificationsToTerraform(struct?: JobTaskWebhookNo
}
}
+
+export function jobTaskWebhookNotificationsToHclTerraform(struct?: JobTaskWebhookNotificationsOutputReference | JobTaskWebhookNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(jobTaskWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform, true)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskWebhookNotificationsOnDurationWarningThresholdExceededList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(jobTaskWebhookNotificationsOnFailureToHclTerraform, true)(struct!.onFailure),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskWebhookNotificationsOnFailureList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(jobTaskWebhookNotificationsOnStartToHclTerraform, true)(struct!.onStart),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskWebhookNotificationsOnStartList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(jobTaskWebhookNotificationsOnSuccessToHclTerraform, true)(struct!.onSuccess),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskWebhookNotificationsOnSuccessList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskWebhookNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -16980,6 +21135,181 @@ export function jobTaskToTerraform(struct?: JobTask | cdktf.IResolvable): any {
}
}
+
+export function jobTaskToHclTerraform(struct?: JobTask | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ compute_key: {
+ value: cdktf.stringToHclTerraform(struct!.computeKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(struct!.description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ existing_cluster_id: {
+ value: cdktf.stringToHclTerraform(struct!.existingClusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_cluster_key: {
+ value: cdktf.stringToHclTerraform(struct!.jobClusterKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_retries: {
+ value: cdktf.numberToHclTerraform(struct!.maxRetries),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_retry_interval_millis: {
+ value: cdktf.numberToHclTerraform(struct!.minRetryIntervalMillis),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ retry_on_timeout: {
+ value: cdktf.booleanToHclTerraform(struct!.retryOnTimeout),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ run_if: {
+ value: cdktf.stringToHclTerraform(struct!.runIf),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ task_key: {
+ value: cdktf.stringToHclTerraform(struct!.taskKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timeout_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.timeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ condition_task: {
+ value: jobTaskConditionTaskToHclTerraform(struct!.conditionTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskConditionTaskList",
+ },
+ dbt_task: {
+ value: jobTaskDbtTaskToHclTerraform(struct!.dbtTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskDbtTaskList",
+ },
+ depends_on: {
+ value: cdktf.listMapperHcl(jobTaskDependsOnToHclTerraform, true)(struct!.dependsOn),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskDependsOnList",
+ },
+ email_notifications: {
+ value: jobTaskEmailNotificationsToHclTerraform(struct!.emailNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskEmailNotificationsList",
+ },
+ health: {
+ value: jobTaskHealthToHclTerraform(struct!.health),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskHealthList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(jobTaskLibraryToHclTerraform, true)(struct!.library),
+ isBlock: true,
+ type: "set",
+ storageClassType: "JobTaskLibraryList",
+ },
+ new_cluster: {
+ value: jobTaskNewClusterToHclTerraform(struct!.newCluster),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNewClusterList",
+ },
+ notebook_task: {
+ value: jobTaskNotebookTaskToHclTerraform(struct!.notebookTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNotebookTaskList",
+ },
+ notification_settings: {
+ value: jobTaskNotificationSettingsToHclTerraform(struct!.notificationSettings),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskNotificationSettingsList",
+ },
+ pipeline_task: {
+ value: jobTaskPipelineTaskToHclTerraform(struct!.pipelineTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskPipelineTaskList",
+ },
+ python_wheel_task: {
+ value: jobTaskPythonWheelTaskToHclTerraform(struct!.pythonWheelTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskPythonWheelTaskList",
+ },
+ run_job_task: {
+ value: jobTaskRunJobTaskToHclTerraform(struct!.runJobTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskRunJobTaskList",
+ },
+ spark_jar_task: {
+ value: jobTaskSparkJarTaskToHclTerraform(struct!.sparkJarTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSparkJarTaskList",
+ },
+ spark_python_task: {
+ value: jobTaskSparkPythonTaskToHclTerraform(struct!.sparkPythonTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSparkPythonTaskList",
+ },
+ spark_submit_task: {
+ value: jobTaskSparkSubmitTaskToHclTerraform(struct!.sparkSubmitTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSparkSubmitTaskList",
+ },
+ sql_task: {
+ value: jobTaskSqlTaskToHclTerraform(struct!.sqlTask),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskSqlTaskList",
+ },
+ webhook_notifications: {
+ value: jobTaskWebhookNotificationsToHclTerraform(struct!.webhookNotifications),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskWebhookNotificationsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTaskOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -17654,6 +21984,31 @@ export function jobTimeoutsToTerraform(struct?: JobTimeouts | cdktf.IResolvable)
}
}
+
+export function jobTimeoutsToHclTerraform(struct?: JobTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ create: {
+ value: cdktf.stringToHclTerraform(struct!.create),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ update: {
+ value: cdktf.stringToHclTerraform(struct!.update),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -17761,6 +22116,37 @@ export function jobTriggerFileArrivalToTerraform(struct?: JobTriggerFileArrivalO
}
}
+
+export function jobTriggerFileArrivalToHclTerraform(struct?: JobTriggerFileArrivalOutputReference | JobTriggerFileArrival): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ min_time_between_triggers_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.minTimeBetweenTriggersSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ wait_after_last_change_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.waitAfterLastChangeSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTriggerFileArrivalOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
diff --git a/src/job/index-structs/structs400.ts b/src/job/index-structs/structs400.ts
index 12e8446e9..c68d3fe9d 100644
--- a/src/job/index-structs/structs400.ts
+++ b/src/job/index-structs/structs400.ts
@@ -6,6 +6,7 @@
import * as cdktf from 'cdktf';
import { JobTriggerFileArrival,
jobTriggerFileArrivalToTerraform,
+jobTriggerFileArrivalToHclTerraform,
JobTriggerFileArrivalOutputReference } from './structs0'
export interface JobTrigger {
/**
@@ -31,6 +32,31 @@ export function jobTriggerToTerraform(struct?: JobTriggerOutputReference | JobTr
}
}
+
+export function jobTriggerToHclTerraform(struct?: JobTriggerOutputReference | JobTrigger): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ pause_status: {
+ value: cdktf.stringToHclTerraform(struct!.pauseStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ file_arrival: {
+ value: jobTriggerFileArrivalToHclTerraform(struct!.fileArrival),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTriggerFileArrivalList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobTriggerOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -118,6 +144,25 @@ export function jobWebhookNotificationsOnDurationWarningThresholdExceededToTerra
}
}
+
+export function jobWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform(struct?: JobWebhookNotificationsOnDurationWarningThresholdExceeded | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobWebhookNotificationsOnDurationWarningThresholdExceededOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -218,6 +263,25 @@ export function jobWebhookNotificationsOnFailureToTerraform(struct?: JobWebhookN
}
}
+
+export function jobWebhookNotificationsOnFailureToHclTerraform(struct?: JobWebhookNotificationsOnFailure | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobWebhookNotificationsOnFailureOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -318,6 +382,25 @@ export function jobWebhookNotificationsOnStartToTerraform(struct?: JobWebhookNot
}
}
+
+export function jobWebhookNotificationsOnStartToHclTerraform(struct?: JobWebhookNotificationsOnStart | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobWebhookNotificationsOnStartOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -418,6 +501,25 @@ export function jobWebhookNotificationsOnSuccessToTerraform(struct?: JobWebhookN
}
}
+
+export function jobWebhookNotificationsOnSuccessToHclTerraform(struct?: JobWebhookNotificationsOnSuccess | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobWebhookNotificationsOnSuccessOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -538,6 +640,43 @@ export function jobWebhookNotificationsToTerraform(struct?: JobWebhookNotificati
}
}
+
+export function jobWebhookNotificationsToHclTerraform(struct?: JobWebhookNotificationsOutputReference | JobWebhookNotifications): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ on_duration_warning_threshold_exceeded: {
+ value: cdktf.listMapperHcl(jobWebhookNotificationsOnDurationWarningThresholdExceededToHclTerraform, true)(struct!.onDurationWarningThresholdExceeded),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobWebhookNotificationsOnDurationWarningThresholdExceededList",
+ },
+ on_failure: {
+ value: cdktf.listMapperHcl(jobWebhookNotificationsOnFailureToHclTerraform, true)(struct!.onFailure),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobWebhookNotificationsOnFailureList",
+ },
+ on_start: {
+ value: cdktf.listMapperHcl(jobWebhookNotificationsOnStartToHclTerraform, true)(struct!.onStart),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobWebhookNotificationsOnStartList",
+ },
+ on_success: {
+ value: cdktf.listMapperHcl(jobWebhookNotificationsOnSuccessToHclTerraform, true)(struct!.onSuccess),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobWebhookNotificationsOnSuccessList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class JobWebhookNotificationsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
diff --git a/src/job/index.ts b/src/job/index.ts
index 3816a4361..d48023bd2 100644
--- a/src/job/index.ts
+++ b/src/job/index.ts
@@ -7,81 +7,107 @@
import { JobCompute,
jobComputeToTerraform,
+jobComputeToHclTerraform,
JobComputeList,
JobContinuous,
jobContinuousToTerraform,
+jobContinuousToHclTerraform,
JobContinuousOutputReference,
JobDbtTask,
jobDbtTaskToTerraform,
+jobDbtTaskToHclTerraform,
JobDbtTaskOutputReference,
JobDeployment,
jobDeploymentToTerraform,
+jobDeploymentToHclTerraform,
JobDeploymentOutputReference,
JobEmailNotifications,
jobEmailNotificationsToTerraform,
+jobEmailNotificationsToHclTerraform,
JobEmailNotificationsOutputReference,
JobGitSource,
jobGitSourceToTerraform,
+jobGitSourceToHclTerraform,
JobGitSourceOutputReference,
JobHealth,
jobHealthToTerraform,
+jobHealthToHclTerraform,
JobHealthOutputReference,
JobJobCluster,
jobJobClusterToTerraform,
+jobJobClusterToHclTerraform,
JobJobClusterList,
JobLibrary,
jobLibraryToTerraform,
+jobLibraryToHclTerraform,
JobLibraryList,
JobNewCluster,
jobNewClusterToTerraform,
+jobNewClusterToHclTerraform,
JobNewClusterOutputReference,
JobNotebookTask,
jobNotebookTaskToTerraform,
+jobNotebookTaskToHclTerraform,
JobNotebookTaskOutputReference,
JobNotificationSettings,
jobNotificationSettingsToTerraform,
+jobNotificationSettingsToHclTerraform,
JobNotificationSettingsOutputReference,
JobParameter,
jobParameterToTerraform,
+jobParameterToHclTerraform,
JobParameterList,
JobPipelineTask,
jobPipelineTaskToTerraform,
+jobPipelineTaskToHclTerraform,
JobPipelineTaskOutputReference,
JobPythonWheelTask,
jobPythonWheelTaskToTerraform,
+jobPythonWheelTaskToHclTerraform,
JobPythonWheelTaskOutputReference,
JobQueue,
jobQueueToTerraform,
+jobQueueToHclTerraform,
JobQueueOutputReference,
JobRunAs,
jobRunAsToTerraform,
+jobRunAsToHclTerraform,
JobRunAsOutputReference,
JobRunJobTask,
jobRunJobTaskToTerraform,
+jobRunJobTaskToHclTerraform,
JobRunJobTaskOutputReference,
JobSchedule,
jobScheduleToTerraform,
+jobScheduleToHclTerraform,
JobScheduleOutputReference,
JobSparkJarTask,
jobSparkJarTaskToTerraform,
+jobSparkJarTaskToHclTerraform,
JobSparkJarTaskOutputReference,
JobSparkPythonTask,
jobSparkPythonTaskToTerraform,
+jobSparkPythonTaskToHclTerraform,
JobSparkPythonTaskOutputReference,
JobSparkSubmitTask,
jobSparkSubmitTaskToTerraform,
+jobSparkSubmitTaskToHclTerraform,
JobSparkSubmitTaskOutputReference,
JobTask,
jobTaskToTerraform,
+jobTaskToHclTerraform,
JobTaskList,
JobTimeouts,
jobTimeoutsToTerraform,
+jobTimeoutsToHclTerraform,
JobTimeoutsOutputReference,
JobTrigger,
jobTriggerToTerraform,
+jobTriggerToHclTerraform,
JobTriggerOutputReference,
JobWebhookNotifications,
jobWebhookNotificationsToTerraform,
+jobWebhookNotificationsToHclTerraform,
JobWebhookNotificationsOutputReference} from './index-structs'
export * from './index-structs'
import { Construct } from 'constructs';
@@ -1094,4 +1120,252 @@ export class Job extends cdktf.TerraformResource {
webhook_notifications: jobWebhookNotificationsToTerraform(this._webhookNotifications.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ always_running: {
+ value: cdktf.booleanToHclTerraform(this._alwaysRunning),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ control_run_state: {
+ value: cdktf.booleanToHclTerraform(this._controlRunState),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ edit_mode: {
+ value: cdktf.stringToHclTerraform(this._editMode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ existing_cluster_id: {
+ value: cdktf.stringToHclTerraform(this._existingClusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ format: {
+ value: cdktf.stringToHclTerraform(this._format),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_concurrent_runs: {
+ value: cdktf.numberToHclTerraform(this._maxConcurrentRuns),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ max_retries: {
+ value: cdktf.numberToHclTerraform(this._maxRetries),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_retry_interval_millis: {
+ value: cdktf.numberToHclTerraform(this._minRetryIntervalMillis),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ retry_on_timeout: {
+ value: cdktf.booleanToHclTerraform(this._retryOnTimeout),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._tags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ timeout_seconds: {
+ value: cdktf.numberToHclTerraform(this._timeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ compute: {
+ value: cdktf.listMapperHcl(jobComputeToHclTerraform, true)(this._compute.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobComputeList",
+ },
+ continuous: {
+ value: jobContinuousToHclTerraform(this._continuous.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobContinuousList",
+ },
+ dbt_task: {
+ value: jobDbtTaskToHclTerraform(this._dbtTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobDbtTaskList",
+ },
+ deployment: {
+ value: jobDeploymentToHclTerraform(this._deployment.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobDeploymentList",
+ },
+ email_notifications: {
+ value: jobEmailNotificationsToHclTerraform(this._emailNotifications.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobEmailNotificationsList",
+ },
+ git_source: {
+ value: jobGitSourceToHclTerraform(this._gitSource.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobGitSourceList",
+ },
+ health: {
+ value: jobHealthToHclTerraform(this._health.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobHealthList",
+ },
+ job_cluster: {
+ value: cdktf.listMapperHcl(jobJobClusterToHclTerraform, true)(this._jobCluster.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobJobClusterList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(jobLibraryToHclTerraform, true)(this._library.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "JobLibraryList",
+ },
+ new_cluster: {
+ value: jobNewClusterToHclTerraform(this._newCluster.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNewClusterList",
+ },
+ notebook_task: {
+ value: jobNotebookTaskToHclTerraform(this._notebookTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNotebookTaskList",
+ },
+ notification_settings: {
+ value: jobNotificationSettingsToHclTerraform(this._notificationSettings.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobNotificationSettingsList",
+ },
+ parameter: {
+ value: cdktf.listMapperHcl(jobParameterToHclTerraform, true)(this._parameter.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobParameterList",
+ },
+ pipeline_task: {
+ value: jobPipelineTaskToHclTerraform(this._pipelineTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobPipelineTaskList",
+ },
+ python_wheel_task: {
+ value: jobPythonWheelTaskToHclTerraform(this._pythonWheelTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobPythonWheelTaskList",
+ },
+ queue: {
+ value: jobQueueToHclTerraform(this._queue.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobQueueList",
+ },
+ run_as: {
+ value: jobRunAsToHclTerraform(this._runAs.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobRunAsList",
+ },
+ run_job_task: {
+ value: jobRunJobTaskToHclTerraform(this._runJobTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobRunJobTaskList",
+ },
+ schedule: {
+ value: jobScheduleToHclTerraform(this._schedule.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobScheduleList",
+ },
+ spark_jar_task: {
+ value: jobSparkJarTaskToHclTerraform(this._sparkJarTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobSparkJarTaskList",
+ },
+ spark_python_task: {
+ value: jobSparkPythonTaskToHclTerraform(this._sparkPythonTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobSparkPythonTaskList",
+ },
+ spark_submit_task: {
+ value: jobSparkSubmitTaskToHclTerraform(this._sparkSubmitTask.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobSparkSubmitTaskList",
+ },
+ task: {
+ value: cdktf.listMapperHcl(jobTaskToHclTerraform, true)(this._task.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTaskList",
+ },
+ timeouts: {
+ value: jobTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "JobTimeouts",
+ },
+ trigger: {
+ value: jobTriggerToHclTerraform(this._trigger.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobTriggerList",
+ },
+ webhook_notifications: {
+ value: jobWebhookNotificationsToHclTerraform(this._webhookNotifications.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "JobWebhookNotificationsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/library/README.md b/src/library/README.md
index d1c24c9f6..6a83911f7 100644
--- a/src/library/README.md
+++ b/src/library/README.md
@@ -1,3 +1,3 @@
# `databricks_library`
-Refer to the Terraform Registory for docs: [`databricks_library`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/library).
+Refer to the Terraform Registry for docs: [`databricks_library`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/library).
diff --git a/src/library/index.ts b/src/library/index.ts
index 80dbe0339..37b399888 100644
--- a/src/library/index.ts
+++ b/src/library/index.ts
@@ -76,6 +76,31 @@ export function libraryCranToTerraform(struct?: LibraryCranOutputReference | Lib
}
}
+
+export function libraryCranToHclTerraform(struct?: LibraryCranOutputReference | LibraryCran): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class LibraryCranOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -170,6 +195,37 @@ export function libraryMavenToTerraform(struct?: LibraryMavenOutputReference | L
}
}
+
+export function libraryMavenToHclTerraform(struct?: LibraryMavenOutputReference | LibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class LibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -281,6 +337,31 @@ export function libraryPypiToTerraform(struct?: LibraryPypiOutputReference | Lib
}
}
+
+export function libraryPypiToHclTerraform(struct?: LibraryPypiOutputReference | LibraryPypi): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ package: {
+ value: cdktf.stringToHclTerraform(struct!.package),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class LibraryPypiOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -555,4 +636,60 @@ export class Library extends cdktf.TerraformResource {
pypi: libraryPypiToTerraform(this._pypi.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ egg: {
+ value: cdktf.stringToHclTerraform(this._egg),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jar: {
+ value: cdktf.stringToHclTerraform(this._jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(this._whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cran: {
+ value: libraryCranToHclTerraform(this._cran.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "LibraryCranList",
+ },
+ maven: {
+ value: libraryMavenToHclTerraform(this._maven.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "LibraryMavenList",
+ },
+ pypi: {
+ value: libraryPypiToHclTerraform(this._pypi.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "LibraryPypiList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/metastore-assignment/README.md b/src/metastore-assignment/README.md
index 277877666..60ebcb699 100644
--- a/src/metastore-assignment/README.md
+++ b/src/metastore-assignment/README.md
@@ -1,3 +1,3 @@
# `databricks_metastore_assignment`
-Refer to the Terraform Registory for docs: [`databricks_metastore_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore_assignment).
+Refer to the Terraform Registry for docs: [`databricks_metastore_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore_assignment).
diff --git a/src/metastore-assignment/index.ts b/src/metastore-assignment/index.ts
index fd028b8f8..9ceba733b 100644
--- a/src/metastore-assignment/index.ts
+++ b/src/metastore-assignment/index.ts
@@ -164,4 +164,36 @@ export class MetastoreAssignment extends cdktf.TerraformResource {
workspace_id: cdktf.numberToTerraform(this._workspaceId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ default_catalog_name: {
+ value: cdktf.stringToHclTerraform(this._defaultCatalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_id: {
+ value: cdktf.numberToHclTerraform(this._workspaceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/metastore-data-access/README.md b/src/metastore-data-access/README.md
index 22221c457..6ff154927 100644
--- a/src/metastore-data-access/README.md
+++ b/src/metastore-data-access/README.md
@@ -1,3 +1,3 @@
# `databricks_metastore_data_access`
-Refer to the Terraform Registory for docs: [`databricks_metastore_data_access`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore_data_access).
+Refer to the Terraform Registry for docs: [`databricks_metastore_data_access`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore_data_access).
diff --git a/src/metastore-data-access/index.ts b/src/metastore-data-access/index.ts
index 1a228b692..43a7cb3bb 100644
--- a/src/metastore-data-access/index.ts
+++ b/src/metastore-data-access/index.ts
@@ -109,6 +109,37 @@ export function metastoreDataAccessAwsIamRoleToTerraform(struct?: MetastoreDataA
}
}
+
+export function metastoreDataAccessAwsIamRoleToHclTerraform(struct?: MetastoreDataAccessAwsIamRoleOutputReference | MetastoreDataAccessAwsIamRole): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ external_id: {
+ value: cdktf.stringToHclTerraform(struct!.externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role_arn: {
+ value: cdktf.stringToHclTerraform(struct!.roleArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ unity_catalog_iam_arn: {
+ value: cdktf.stringToHclTerraform(struct!.unityCatalogIamArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MetastoreDataAccessAwsIamRoleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -225,6 +256,37 @@ export function metastoreDataAccessAzureManagedIdentityToTerraform(struct?: Meta
}
}
+
+export function metastoreDataAccessAzureManagedIdentityToHclTerraform(struct?: MetastoreDataAccessAzureManagedIdentityOutputReference | MetastoreDataAccessAzureManagedIdentity): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ access_connector_id: {
+ value: cdktf.stringToHclTerraform(struct!.accessConnectorId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ credential_id: {
+ value: cdktf.stringToHclTerraform(struct!.credentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ managed_identity_id: {
+ value: cdktf.stringToHclTerraform(struct!.managedIdentityId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MetastoreDataAccessAzureManagedIdentityOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -341,6 +403,37 @@ export function metastoreDataAccessAzureServicePrincipalToTerraform(struct?: Met
}
}
+
+export function metastoreDataAccessAzureServicePrincipalToHclTerraform(struct?: MetastoreDataAccessAzureServicePrincipalOutputReference | MetastoreDataAccessAzureServicePrincipal): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ application_id: {
+ value: cdktf.stringToHclTerraform(struct!.applicationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory_id: {
+ value: cdktf.stringToHclTerraform(struct!.directoryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MetastoreDataAccessAzureServicePrincipalOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -446,6 +539,31 @@ export function metastoreDataAccessDatabricksGcpServiceAccountToTerraform(struct
}
}
+
+export function metastoreDataAccessDatabricksGcpServiceAccountToHclTerraform(struct?: MetastoreDataAccessDatabricksGcpServiceAccountOutputReference | MetastoreDataAccessDatabricksGcpServiceAccount): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ credential_id: {
+ value: cdktf.stringToHclTerraform(struct!.credentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ email: {
+ value: cdktf.stringToHclTerraform(struct!.email),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MetastoreDataAccessDatabricksGcpServiceAccountOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -543,6 +661,37 @@ export function metastoreDataAccessGcpServiceAccountKeyToTerraform(struct?: Meta
}
}
+
+export function metastoreDataAccessGcpServiceAccountKeyToHclTerraform(struct?: MetastoreDataAccessGcpServiceAccountKeyOutputReference | MetastoreDataAccessGcpServiceAccountKey): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ email: {
+ value: cdktf.stringToHclTerraform(struct!.email),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_key: {
+ value: cdktf.stringToHclTerraform(struct!.privateKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_key_id: {
+ value: cdktf.stringToHclTerraform(struct!.privateKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MetastoreDataAccessGcpServiceAccountKeyOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -941,4 +1090,96 @@ export class MetastoreDataAccess extends cdktf.TerraformResource {
gcp_service_account_key: metastoreDataAccessGcpServiceAccountKeyToTerraform(this._gcpServiceAccountKey.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_update: {
+ value: cdktf.booleanToHclTerraform(this._forceUpdate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_default: {
+ value: cdktf.booleanToHclTerraform(this._isDefault),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ read_only: {
+ value: cdktf.booleanToHclTerraform(this._readOnly),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ aws_iam_role: {
+ value: metastoreDataAccessAwsIamRoleToHclTerraform(this._awsIamRole.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MetastoreDataAccessAwsIamRoleList",
+ },
+ azure_managed_identity: {
+ value: metastoreDataAccessAzureManagedIdentityToHclTerraform(this._azureManagedIdentity.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MetastoreDataAccessAzureManagedIdentityList",
+ },
+ azure_service_principal: {
+ value: metastoreDataAccessAzureServicePrincipalToHclTerraform(this._azureServicePrincipal.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MetastoreDataAccessAzureServicePrincipalList",
+ },
+ databricks_gcp_service_account: {
+ value: metastoreDataAccessDatabricksGcpServiceAccountToHclTerraform(this._databricksGcpServiceAccount.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MetastoreDataAccessDatabricksGcpServiceAccountList",
+ },
+ gcp_service_account_key: {
+ value: metastoreDataAccessGcpServiceAccountKeyToHclTerraform(this._gcpServiceAccountKey.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MetastoreDataAccessGcpServiceAccountKeyList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/metastore/README.md b/src/metastore/README.md
index 411db201b..5d913a2bb 100644
--- a/src/metastore/README.md
+++ b/src/metastore/README.md
@@ -1,3 +1,3 @@
# `databricks_metastore`
-Refer to the Terraform Registory for docs: [`databricks_metastore`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore).
+Refer to the Terraform Registry for docs: [`databricks_metastore`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/metastore).
diff --git a/src/metastore/index.ts b/src/metastore/index.ts
index 3f8024465..d7a3db64a 100644
--- a/src/metastore/index.ts
+++ b/src/metastore/index.ts
@@ -475,4 +475,120 @@ export class Metastore extends cdktf.TerraformResource {
updated_by: cdktf.stringToTerraform(this._updatedBy),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cloud: {
+ value: cdktf.stringToHclTerraform(this._cloud),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ created_at: {
+ value: cdktf.numberToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(this._createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ default_data_access_config_id: {
+ value: cdktf.stringToHclTerraform(this._defaultDataAccessConfigId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delta_sharing_organization_name: {
+ value: cdktf.stringToHclTerraform(this._deltaSharingOrganizationName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delta_sharing_recipient_token_lifetime_in_seconds: {
+ value: cdktf.numberToHclTerraform(this._deltaSharingRecipientTokenLifetimeInSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ delta_sharing_scope: {
+ value: cdktf.stringToHclTerraform(this._deltaSharingScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ global_metastore_id: {
+ value: cdktf.stringToHclTerraform(this._globalMetastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(this._region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root: {
+ value: cdktf.stringToHclTerraform(this._storageRoot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_root_credential_id: {
+ value: cdktf.stringToHclTerraform(this._storageRootCredentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ updated_at: {
+ value: cdktf.numberToHclTerraform(this._updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ updated_by: {
+ value: cdktf.stringToHclTerraform(this._updatedBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mlflow-experiment/README.md b/src/mlflow-experiment/README.md
index 30496ec30..7412c7dac 100644
--- a/src/mlflow-experiment/README.md
+++ b/src/mlflow-experiment/README.md
@@ -1,3 +1,3 @@
# `databricks_mlflow_experiment`
-Refer to the Terraform Registory for docs: [`databricks_mlflow_experiment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_experiment).
+Refer to the Terraform Registry for docs: [`databricks_mlflow_experiment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_experiment).
diff --git a/src/mlflow-experiment/index.ts b/src/mlflow-experiment/index.ts
index f125842de..27987e87d 100644
--- a/src/mlflow-experiment/index.ts
+++ b/src/mlflow-experiment/index.ts
@@ -66,6 +66,17 @@ export function mlflowExperimentTimeoutsToTerraform(struct?: MlflowExperimentTim
}
}
+
+export function mlflowExperimentTimeoutsToHclTerraform(struct?: MlflowExperimentTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ };
+ return attrs;
+}
+
export class MlflowExperimentTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -324,4 +335,66 @@ export class MlflowExperiment extends cdktf.TerraformResource {
timeouts: mlflowExperimentTimeoutsToTerraform(this._timeouts.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ artifact_location: {
+ value: cdktf.stringToHclTerraform(this._artifactLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ experiment_id: {
+ value: cdktf.stringToHclTerraform(this._experimentId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ last_update_time: {
+ value: cdktf.numberToHclTerraform(this._lastUpdateTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ lifecycle_stage: {
+ value: cdktf.stringToHclTerraform(this._lifecycleStage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ timeouts: {
+ value: mlflowExperimentTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "MlflowExperimentTimeouts",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mlflow-model/README.md b/src/mlflow-model/README.md
index 23d4892cf..9a52811ec 100644
--- a/src/mlflow-model/README.md
+++ b/src/mlflow-model/README.md
@@ -1,3 +1,3 @@
# `databricks_mlflow_model`
-Refer to the Terraform Registory for docs: [`databricks_mlflow_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_model).
+Refer to the Terraform Registry for docs: [`databricks_mlflow_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_model).
diff --git a/src/mlflow-model/index.ts b/src/mlflow-model/index.ts
index 4cca19a8c..e37fc9be7 100644
--- a/src/mlflow-model/index.ts
+++ b/src/mlflow-model/index.ts
@@ -56,6 +56,31 @@ export function mlflowModelTagsToTerraform(struct?: MlflowModelTags | cdktf.IRes
}
}
+
+export function mlflowModelTagsToHclTerraform(struct?: MlflowModelTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MlflowModelTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -298,4 +323,36 @@ export class MlflowModel extends cdktf.TerraformResource {
tags: cdktf.listMapper(mlflowModelTagsToTerraform, true)(this._tags.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(mlflowModelTagsToHclTerraform, true)(this._tags.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MlflowModelTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mlflow-webhook/README.md b/src/mlflow-webhook/README.md
index 5bd8d67e1..c839ad1a8 100644
--- a/src/mlflow-webhook/README.md
+++ b/src/mlflow-webhook/README.md
@@ -1,3 +1,3 @@
# `databricks_mlflow_webhook`
-Refer to the Terraform Registory for docs: [`databricks_mlflow_webhook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_webhook).
+Refer to the Terraform Registry for docs: [`databricks_mlflow_webhook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mlflow_webhook).
diff --git a/src/mlflow-webhook/index.ts b/src/mlflow-webhook/index.ts
index f71f535f4..81f5d4da4 100644
--- a/src/mlflow-webhook/index.ts
+++ b/src/mlflow-webhook/index.ts
@@ -80,6 +80,43 @@ export function mlflowWebhookHttpUrlSpecToTerraform(struct?: MlflowWebhookHttpUr
}
}
+
+export function mlflowWebhookHttpUrlSpecToHclTerraform(struct?: MlflowWebhookHttpUrlSpecOutputReference | MlflowWebhookHttpUrlSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ authorization: {
+ value: cdktf.stringToHclTerraform(struct!.authorization),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_ssl_verification: {
+ value: cdktf.booleanToHclTerraform(struct!.enableSslVerification),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ secret: {
+ value: cdktf.stringToHclTerraform(struct!.secret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(struct!.url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MlflowWebhookHttpUrlSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -218,6 +255,37 @@ export function mlflowWebhookJobSpecToTerraform(struct?: MlflowWebhookJobSpecOut
}
}
+
+export function mlflowWebhookJobSpecToHclTerraform(struct?: MlflowWebhookJobSpecOutputReference | MlflowWebhookJobSpec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ access_token: {
+ value: cdktf.stringToHclTerraform(struct!.accessToken),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_id: {
+ value: cdktf.stringToHclTerraform(struct!.jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_url: {
+ value: cdktf.stringToHclTerraform(struct!.workspaceUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MlflowWebhookJobSpecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -493,4 +561,54 @@ export class MlflowWebhook extends cdktf.TerraformResource {
job_spec: mlflowWebhookJobSpecToTerraform(this._jobSpec.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ events: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._events),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ model_name: {
+ value: cdktf.stringToHclTerraform(this._modelName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(this._status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ http_url_spec: {
+ value: mlflowWebhookHttpUrlSpecToHclTerraform(this._httpUrlSpec.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MlflowWebhookHttpUrlSpecList",
+ },
+ job_spec: {
+ value: mlflowWebhookJobSpecToHclTerraform(this._jobSpec.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MlflowWebhookJobSpecList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/model-serving/README.md b/src/model-serving/README.md
index 984473871..6b17ec808 100644
--- a/src/model-serving/README.md
+++ b/src/model-serving/README.md
@@ -1,3 +1,3 @@
# `databricks_model_serving`
-Refer to the Terraform Registory for docs: [`databricks_model_serving`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/model_serving).
+Refer to the Terraform Registry for docs: [`databricks_model_serving`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/model_serving).
diff --git a/src/model-serving/index.ts b/src/model-serving/index.ts
index c289164a2..3273f8759 100644
--- a/src/model-serving/index.ts
+++ b/src/model-serving/index.ts
@@ -80,6 +80,43 @@ export function modelServingConfigAutoCaptureConfigToTerraform(struct?: ModelSer
}
}
+
+export function modelServingConfigAutoCaptureConfigToHclTerraform(struct?: ModelServingConfigAutoCaptureConfigOutputReference | ModelServingConfigAutoCaptureConfig): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(struct!.catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.enabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(struct!.schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ table_name_prefix: {
+ value: cdktf.stringToHclTerraform(struct!.tableNamePrefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingConfigAutoCaptureConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -246,6 +283,67 @@ export function modelServingConfigServedModelsToTerraform(struct?: ModelServingC
}
}
+
+export function modelServingConfigServedModelsToHclTerraform(struct?: ModelServingConfigServedModels | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ environment_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.environmentVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ model_name: {
+ value: cdktf.stringToHclTerraform(struct!.modelName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ model_version: {
+ value: cdktf.stringToHclTerraform(struct!.modelVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ scale_to_zero_enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.scaleToZeroEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ workload_size: {
+ value: cdktf.stringToHclTerraform(struct!.workloadSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workload_type: {
+ value: cdktf.stringToHclTerraform(struct!.workloadType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingConfigServedModelsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -493,6 +591,31 @@ export function modelServingConfigTrafficConfigRoutesToTerraform(struct?: ModelS
}
}
+
+export function modelServingConfigTrafficConfigRoutesToHclTerraform(struct?: ModelServingConfigTrafficConfigRoutes | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ served_model_name: {
+ value: cdktf.stringToHclTerraform(struct!.servedModelName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ traffic_percentage: {
+ value: cdktf.numberToHclTerraform(struct!.trafficPercentage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingConfigTrafficConfigRoutesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -608,6 +731,25 @@ export function modelServingConfigTrafficConfigToTerraform(struct?: ModelServing
}
}
+
+export function modelServingConfigTrafficConfigToHclTerraform(struct?: ModelServingConfigTrafficConfigOutputReference | ModelServingConfigTrafficConfig): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ routes: {
+ value: cdktf.listMapperHcl(modelServingConfigTrafficConfigRoutesToHclTerraform, true)(struct!.routes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingConfigTrafficConfigRoutesList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingConfigTrafficConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -689,6 +831,37 @@ export function modelServingConfigAToTerraform(struct?: ModelServingConfigAOutpu
}
}
+
+export function modelServingConfigAToHclTerraform(struct?: ModelServingConfigAOutputReference | ModelServingConfigA): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ auto_capture_config: {
+ value: modelServingConfigAutoCaptureConfigToHclTerraform(struct!.autoCaptureConfig),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingConfigAutoCaptureConfigList",
+ },
+ served_models: {
+ value: cdktf.listMapperHcl(modelServingConfigServedModelsToHclTerraform, true)(struct!.servedModels),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingConfigServedModelsList",
+ },
+ traffic_config: {
+ value: modelServingConfigTrafficConfigToHclTerraform(struct!.trafficConfig),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingConfigTrafficConfigList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingConfigAOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -808,6 +981,37 @@ export function modelServingRateLimitsToTerraform(struct?: ModelServingRateLimit
}
}
+
+export function modelServingRateLimitsToHclTerraform(struct?: ModelServingRateLimits | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ calls: {
+ value: cdktf.numberToHclTerraform(struct!.calls),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ renewal_period: {
+ value: cdktf.stringToHclTerraform(struct!.renewalPeriod),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingRateLimitsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -948,6 +1152,31 @@ export function modelServingTagsToTerraform(struct?: ModelServingTags | cdktf.IR
}
}
+
+export function modelServingTagsToHclTerraform(struct?: ModelServingTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1069,6 +1298,31 @@ export function modelServingTimeoutsToTerraform(struct?: ModelServingTimeouts |
}
}
+
+export function modelServingTimeoutsToHclTerraform(struct?: ModelServingTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ create: {
+ value: cdktf.stringToHclTerraform(struct!.create),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ update: {
+ value: cdktf.stringToHclTerraform(struct!.update),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ModelServingTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1322,4 +1576,48 @@ export class ModelServing extends cdktf.TerraformResource {
timeouts: modelServingTimeoutsToTerraform(this._timeouts.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ config: {
+ value: modelServingConfigAToHclTerraform(this._config.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingConfigAList",
+ },
+ rate_limits: {
+ value: cdktf.listMapperHcl(modelServingRateLimitsToHclTerraform, true)(this._rateLimits.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingRateLimitsList",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(modelServingTagsToHclTerraform, true)(this._tags.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ModelServingTagsList",
+ },
+ timeouts: {
+ value: modelServingTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "ModelServingTimeouts",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mount/README.md b/src/mount/README.md
index c3e549ffc..51ff0057b 100644
--- a/src/mount/README.md
+++ b/src/mount/README.md
@@ -1,3 +1,3 @@
# `databricks_mount`
-Refer to the Terraform Registory for docs: [`databricks_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mount).
+Refer to the Terraform Registry for docs: [`databricks_mount`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mount).
diff --git a/src/mount/index.ts b/src/mount/index.ts
index 5c660fe26..cf22b185f 100644
--- a/src/mount/index.ts
+++ b/src/mount/index.ts
@@ -132,6 +132,67 @@ export function mountAbfsToTerraform(struct?: MountAbfsOutputReference | MountAb
}
}
+
+export function mountAbfsToHclTerraform(struct?: MountAbfsOutputReference | MountAbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ client_id: {
+ value: cdktf.stringToHclTerraform(struct!.clientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_key: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_scope: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ container_name: {
+ value: cdktf.stringToHclTerraform(struct!.containerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(struct!.directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ initialize_file_system: {
+ value: cdktf.booleanToHclTerraform(struct!.initializeFileSystem),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ storage_account_name: {
+ value: cdktf.stringToHclTerraform(struct!.storageAccountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tenant_id: {
+ value: cdktf.stringToHclTerraform(struct!.tenantId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountAbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -369,6 +430,61 @@ export function mountAdlToTerraform(struct?: MountAdlOutputReference | MountAdl)
}
}
+
+export function mountAdlToHclTerraform(struct?: MountAdlOutputReference | MountAdl): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ client_id: {
+ value: cdktf.stringToHclTerraform(struct!.clientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_key: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret_scope: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(struct!.directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf_prefix: {
+ value: cdktf.stringToHclTerraform(struct!.sparkConfPrefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_resource_name: {
+ value: cdktf.stringToHclTerraform(struct!.storageResourceName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tenant_id: {
+ value: cdktf.stringToHclTerraform(struct!.tenantId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountAdlOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -562,6 +678,31 @@ export function mountGsToTerraform(struct?: MountGsOutputReference | MountGs): a
}
}
+
+export function mountGsToHclTerraform(struct?: MountGsOutputReference | MountGs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ bucket_name: {
+ value: cdktf.stringToHclTerraform(struct!.bucketName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_account: {
+ value: cdktf.stringToHclTerraform(struct!.serviceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountGsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -651,6 +792,31 @@ export function mountS3ToTerraform(struct?: MountS3OutputReference | MountS3): a
}
}
+
+export function mountS3ToHclTerraform(struct?: MountS3OutputReference | MountS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ bucket_name: {
+ value: cdktf.stringToHclTerraform(struct!.bucketName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -735,6 +901,25 @@ export function mountTimeoutsToTerraform(struct?: MountTimeouts | cdktf.IResolva
}
}
+
+export function mountTimeoutsToHclTerraform(struct?: MountTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ default: {
+ value: cdktf.stringToHclTerraform(struct!.default),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -835,6 +1020,55 @@ export function mountWasbToTerraform(struct?: MountWasbOutputReference | MountWa
}
}
+
+export function mountWasbToHclTerraform(struct?: MountWasbOutputReference | MountWasb): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ auth_type: {
+ value: cdktf.stringToHclTerraform(struct!.authType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ container_name: {
+ value: cdktf.stringToHclTerraform(struct!.containerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory: {
+ value: cdktf.stringToHclTerraform(struct!.directory),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_account_name: {
+ value: cdktf.stringToHclTerraform(struct!.storageAccountName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ token_secret_key: {
+ value: cdktf.stringToHclTerraform(struct!.tokenSecretKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ token_secret_scope: {
+ value: cdktf.stringToHclTerraform(struct!.tokenSecretScope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MountWasbOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1289,4 +1523,90 @@ export class Mount extends cdktf.TerraformResource {
wasb: mountWasbToTerraform(this._wasb.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(this._encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ extra_configs: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._extraConfigs),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ resource_id: {
+ value: cdktf.stringToHclTerraform(this._resourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ uri: {
+ value: cdktf.stringToHclTerraform(this._uri),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ abfs: {
+ value: mountAbfsToHclTerraform(this._abfs.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MountAbfsList",
+ },
+ adl: {
+ value: mountAdlToHclTerraform(this._adl.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MountAdlList",
+ },
+ gs: {
+ value: mountGsToHclTerraform(this._gs.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MountGsList",
+ },
+ s3: {
+ value: mountS3ToHclTerraform(this._s3.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MountS3List",
+ },
+ timeouts: {
+ value: mountTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "MountTimeouts",
+ },
+ wasb: {
+ value: mountWasbToHclTerraform(this._wasb.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MountWasbList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-credentials/README.md b/src/mws-credentials/README.md
index 785f3cd35..8eb74162c 100644
--- a/src/mws-credentials/README.md
+++ b/src/mws-credentials/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_credentials`
-Refer to the Terraform Registory for docs: [`databricks_mws_credentials`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_credentials).
+Refer to the Terraform Registry for docs: [`databricks_mws_credentials`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_credentials).
diff --git a/src/mws-credentials/index.ts b/src/mws-credentials/index.ts
index 28032562c..8fc4cac2c 100644
--- a/src/mws-credentials/index.ts
+++ b/src/mws-credentials/index.ts
@@ -230,4 +230,54 @@ export class MwsCredentials extends cdktf.TerraformResource {
role_arn: cdktf.stringToTerraform(this._roleArn),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ credentials_id: {
+ value: cdktf.stringToHclTerraform(this._credentialsId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ credentials_name: {
+ value: cdktf.stringToHclTerraform(this._credentialsName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role_arn: {
+ value: cdktf.stringToHclTerraform(this._roleArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-customer-managed-keys/README.md b/src/mws-customer-managed-keys/README.md
index e8d16aac6..4c68b72e4 100644
--- a/src/mws-customer-managed-keys/README.md
+++ b/src/mws-customer-managed-keys/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_customer_managed_keys`
-Refer to the Terraform Registory for docs: [`databricks_mws_customer_managed_keys`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_customer_managed_keys).
+Refer to the Terraform Registry for docs: [`databricks_mws_customer_managed_keys`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_customer_managed_keys).
diff --git a/src/mws-customer-managed-keys/index.ts b/src/mws-customer-managed-keys/index.ts
index 8f0494ff6..8a4b9517f 100644
--- a/src/mws-customer-managed-keys/index.ts
+++ b/src/mws-customer-managed-keys/index.ts
@@ -75,6 +75,37 @@ export function mwsCustomerManagedKeysAwsKeyInfoToTerraform(struct?: MwsCustomer
}
}
+
+export function mwsCustomerManagedKeysAwsKeyInfoToHclTerraform(struct?: MwsCustomerManagedKeysAwsKeyInfoOutputReference | MwsCustomerManagedKeysAwsKeyInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key_alias: {
+ value: cdktf.stringToHclTerraform(struct!.keyAlias),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ key_arn: {
+ value: cdktf.stringToHclTerraform(struct!.keyArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ key_region: {
+ value: cdktf.stringToHclTerraform(struct!.keyRegion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsCustomerManagedKeysAwsKeyInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -181,6 +212,25 @@ export function mwsCustomerManagedKeysGcpKeyInfoToTerraform(struct?: MwsCustomer
}
}
+
+export function mwsCustomerManagedKeysGcpKeyInfoToHclTerraform(struct?: MwsCustomerManagedKeysGcpKeyInfoOutputReference | MwsCustomerManagedKeysGcpKeyInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ kms_key_id: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsCustomerManagedKeysGcpKeyInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -412,4 +462,54 @@ export class MwsCustomerManagedKeys extends cdktf.TerraformResource {
gcp_key_info: mwsCustomerManagedKeysGcpKeyInfoToTerraform(this._gcpKeyInfo.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ customer_managed_key_id: {
+ value: cdktf.stringToHclTerraform(this._customerManagedKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ use_cases: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._useCases),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ aws_key_info: {
+ value: mwsCustomerManagedKeysAwsKeyInfoToHclTerraform(this._awsKeyInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsCustomerManagedKeysAwsKeyInfoList",
+ },
+ gcp_key_info: {
+ value: mwsCustomerManagedKeysGcpKeyInfoToHclTerraform(this._gcpKeyInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsCustomerManagedKeysGcpKeyInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-log-delivery/README.md b/src/mws-log-delivery/README.md
index 3265d9b6e..97479a5fa 100644
--- a/src/mws-log-delivery/README.md
+++ b/src/mws-log-delivery/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_log_delivery`
-Refer to the Terraform Registory for docs: [`databricks_mws_log_delivery`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_log_delivery).
+Refer to the Terraform Registry for docs: [`databricks_mws_log_delivery`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_log_delivery).
diff --git a/src/mws-log-delivery/index.ts b/src/mws-log-delivery/index.ts
index c1b7cb9c8..0df8c176d 100644
--- a/src/mws-log-delivery/index.ts
+++ b/src/mws-log-delivery/index.ts
@@ -331,4 +331,84 @@ export class MwsLogDelivery extends cdktf.TerraformResource {
workspace_ids_filter: cdktf.listMapper(cdktf.numberToTerraform, false)(this._workspaceIdsFilter),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ config_id: {
+ value: cdktf.stringToHclTerraform(this._configId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ config_name: {
+ value: cdktf.stringToHclTerraform(this._configName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ credentials_id: {
+ value: cdktf.stringToHclTerraform(this._credentialsId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delivery_path_prefix: {
+ value: cdktf.stringToHclTerraform(this._deliveryPathPrefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ delivery_start_time: {
+ value: cdktf.stringToHclTerraform(this._deliveryStartTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ log_type: {
+ value: cdktf.stringToHclTerraform(this._logType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ output_format: {
+ value: cdktf.stringToHclTerraform(this._outputFormat),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(this._status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_configuration_id: {
+ value: cdktf.stringToHclTerraform(this._storageConfigurationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_ids_filter: {
+ value: cdktf.listMapperHcl(cdktf.numberToHclTerraform, false)(this._workspaceIdsFilter),
+ isBlock: false,
+ type: "list",
+ storageClassType: "numberList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-networks/README.md b/src/mws-networks/README.md
index 5fd17f8a9..48633beb3 100644
--- a/src/mws-networks/README.md
+++ b/src/mws-networks/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_networks`
-Refer to the Terraform Registory for docs: [`databricks_mws_networks`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_networks).
+Refer to the Terraform Registry for docs: [`databricks_mws_networks`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_networks).
diff --git a/src/mws-networks/index.ts b/src/mws-networks/index.ts
index ed3a77462..d17c79242 100644
--- a/src/mws-networks/index.ts
+++ b/src/mws-networks/index.ts
@@ -96,6 +96,31 @@ export function mwsNetworksErrorMessagesToTerraform(struct?: MwsNetworksErrorMes
}
}
+
+export function mwsNetworksErrorMessagesToHclTerraform(struct?: MwsNetworksErrorMessages | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ error_message: {
+ value: cdktf.stringToHclTerraform(struct!.errorMessage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ error_type: {
+ value: cdktf.stringToHclTerraform(struct!.errorType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsNetworksErrorMessagesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -240,6 +265,55 @@ export function mwsNetworksGcpNetworkInfoToTerraform(struct?: MwsNetworksGcpNetw
}
}
+
+export function mwsNetworksGcpNetworkInfoToHclTerraform(struct?: MwsNetworksGcpNetworkInfoOutputReference | MwsNetworksGcpNetworkInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ network_project_id: {
+ value: cdktf.stringToHclTerraform(struct!.networkProjectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pod_ip_range_name: {
+ value: cdktf.stringToHclTerraform(struct!.podIpRangeName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_ip_range_name: {
+ value: cdktf.stringToHclTerraform(struct!.serviceIpRangeName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ subnet_id: {
+ value: cdktf.stringToHclTerraform(struct!.subnetId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ subnet_region: {
+ value: cdktf.stringToHclTerraform(struct!.subnetRegion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ vpc_id: {
+ value: cdktf.stringToHclTerraform(struct!.vpcId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsNetworksGcpNetworkInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -402,6 +476,31 @@ export function mwsNetworksVpcEndpointsToTerraform(struct?: MwsNetworksVpcEndpoi
}
}
+
+export function mwsNetworksVpcEndpointsToHclTerraform(struct?: MwsNetworksVpcEndpointsOutputReference | MwsNetworksVpcEndpoints): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dataplane_relay: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.dataplaneRelay),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ rest_api: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.restApi),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsNetworksVpcEndpointsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -760,4 +859,90 @@ export class MwsNetworks extends cdktf.TerraformResource {
vpc_endpoints: mwsNetworksVpcEndpointsToTerraform(this._vpcEndpoints.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_id: {
+ value: cdktf.stringToHclTerraform(this._networkId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_name: {
+ value: cdktf.stringToHclTerraform(this._networkName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ security_group_ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._securityGroupIds),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ subnet_ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._subnetIds),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ vpc_id: {
+ value: cdktf.stringToHclTerraform(this._vpcId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ vpc_status: {
+ value: cdktf.stringToHclTerraform(this._vpcStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_id: {
+ value: cdktf.numberToHclTerraform(this._workspaceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ error_messages: {
+ value: cdktf.listMapperHcl(mwsNetworksErrorMessagesToHclTerraform, true)(this._errorMessages.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsNetworksErrorMessagesList",
+ },
+ gcp_network_info: {
+ value: mwsNetworksGcpNetworkInfoToHclTerraform(this._gcpNetworkInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsNetworksGcpNetworkInfoList",
+ },
+ vpc_endpoints: {
+ value: mwsNetworksVpcEndpointsToHclTerraform(this._vpcEndpoints.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsNetworksVpcEndpointsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-permission-assignment/README.md b/src/mws-permission-assignment/README.md
index 305e36c3e..9146704fb 100644
--- a/src/mws-permission-assignment/README.md
+++ b/src/mws-permission-assignment/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_permission_assignment`
-Refer to the Terraform Registory for docs: [`databricks_mws_permission_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_permission_assignment).
+Refer to the Terraform Registry for docs: [`databricks_mws_permission_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_permission_assignment).
diff --git a/src/mws-permission-assignment/index.ts b/src/mws-permission-assignment/index.ts
index 2583422f7..db5aaa955 100644
--- a/src/mws-permission-assignment/index.ts
+++ b/src/mws-permission-assignment/index.ts
@@ -161,4 +161,36 @@ export class MwsPermissionAssignment extends cdktf.TerraformResource {
workspace_id: cdktf.numberToTerraform(this._workspaceId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ permissions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._permissions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ principal_id: {
+ value: cdktf.numberToHclTerraform(this._principalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ workspace_id: {
+ value: cdktf.numberToHclTerraform(this._workspaceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-private-access-settings/README.md b/src/mws-private-access-settings/README.md
index c4d9f8a46..f641422d4 100644
--- a/src/mws-private-access-settings/README.md
+++ b/src/mws-private-access-settings/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_private_access_settings`
-Refer to the Terraform Registory for docs: [`databricks_mws_private_access_settings`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_private_access_settings).
+Refer to the Terraform Registry for docs: [`databricks_mws_private_access_settings`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_private_access_settings).
diff --git a/src/mws-private-access-settings/index.ts b/src/mws-private-access-settings/index.ts
index b4a92e8c9..06526a996 100644
--- a/src/mws-private-access-settings/index.ts
+++ b/src/mws-private-access-settings/index.ts
@@ -274,4 +274,66 @@ export class MwsPrivateAccessSettings extends cdktf.TerraformResource {
status: cdktf.stringToTerraform(this._status),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ allowed_vpc_endpoint_ids: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._allowedVpcEndpointIds),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_access_level: {
+ value: cdktf.stringToHclTerraform(this._privateAccessLevel),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_access_settings_id: {
+ value: cdktf.stringToHclTerraform(this._privateAccessSettingsId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_access_settings_name: {
+ value: cdktf.stringToHclTerraform(this._privateAccessSettingsName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ public_access_enabled: {
+ value: cdktf.booleanToHclTerraform(this._publicAccessEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(this._region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(this._status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-storage-configurations/README.md b/src/mws-storage-configurations/README.md
index ab7136d4b..443b26535 100644
--- a/src/mws-storage-configurations/README.md
+++ b/src/mws-storage-configurations/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_storage_configurations`
-Refer to the Terraform Registory for docs: [`databricks_mws_storage_configurations`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_storage_configurations).
+Refer to the Terraform Registry for docs: [`databricks_mws_storage_configurations`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_storage_configurations).
diff --git a/src/mws-storage-configurations/index.ts b/src/mws-storage-configurations/index.ts
index 21226f16b..73f43d01f 100644
--- a/src/mws-storage-configurations/index.ts
+++ b/src/mws-storage-configurations/index.ts
@@ -171,4 +171,36 @@ export class MwsStorageConfigurations extends cdktf.TerraformResource {
storage_configuration_name: cdktf.stringToTerraform(this._storageConfigurationName),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ bucket_name: {
+ value: cdktf.stringToHclTerraform(this._bucketName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_configuration_name: {
+ value: cdktf.stringToHclTerraform(this._storageConfigurationName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-vpc-endpoint/README.md b/src/mws-vpc-endpoint/README.md
index 4a1aa1ace..e3626c27c 100644
--- a/src/mws-vpc-endpoint/README.md
+++ b/src/mws-vpc-endpoint/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_vpc_endpoint`
-Refer to the Terraform Registory for docs: [`databricks_mws_vpc_endpoint`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_vpc_endpoint).
+Refer to the Terraform Registry for docs: [`databricks_mws_vpc_endpoint`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_vpc_endpoint).
diff --git a/src/mws-vpc-endpoint/index.ts b/src/mws-vpc-endpoint/index.ts
index bf22bce71..e2b8fdef7 100644
--- a/src/mws-vpc-endpoint/index.ts
+++ b/src/mws-vpc-endpoint/index.ts
@@ -99,6 +99,49 @@ export function mwsVpcEndpointGcpVpcEndpointInfoToTerraform(struct?: MwsVpcEndpo
}
}
+
+export function mwsVpcEndpointGcpVpcEndpointInfoToHclTerraform(struct?: MwsVpcEndpointGcpVpcEndpointInfoOutputReference | MwsVpcEndpointGcpVpcEndpointInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ endpoint_region: {
+ value: cdktf.stringToHclTerraform(struct!.endpointRegion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ project_id: {
+ value: cdktf.stringToHclTerraform(struct!.projectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ psc_connection_id: {
+ value: cdktf.stringToHclTerraform(struct!.pscConnectionId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ psc_endpoint_name: {
+ value: cdktf.stringToHclTerraform(struct!.pscEndpointName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_attachment_id: {
+ value: cdktf.stringToHclTerraform(struct!.serviceAttachmentId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsVpcEndpointGcpVpcEndpointInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -487,4 +530,78 @@ export class MwsVpcEndpoint extends cdktf.TerraformResource {
gcp_vpc_endpoint_info: mwsVpcEndpointGcpVpcEndpointInfoToTerraform(this._gcpVpcEndpointInfo.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_account_id: {
+ value: cdktf.stringToHclTerraform(this._awsAccountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_endpoint_service_id: {
+ value: cdktf.stringToHclTerraform(this._awsEndpointServiceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_vpc_endpoint_id: {
+ value: cdktf.stringToHclTerraform(this._awsVpcEndpointId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(this._region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(this._state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ use_case: {
+ value: cdktf.stringToHclTerraform(this._useCase),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ vpc_endpoint_id: {
+ value: cdktf.stringToHclTerraform(this._vpcEndpointId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ vpc_endpoint_name: {
+ value: cdktf.stringToHclTerraform(this._vpcEndpointName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ gcp_vpc_endpoint_info: {
+ value: mwsVpcEndpointGcpVpcEndpointInfoToHclTerraform(this._gcpVpcEndpointInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsVpcEndpointGcpVpcEndpointInfoList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/mws-workspaces/README.md b/src/mws-workspaces/README.md
index 1f0209acb..c58fb6c80 100644
--- a/src/mws-workspaces/README.md
+++ b/src/mws-workspaces/README.md
@@ -1,3 +1,3 @@
# `databricks_mws_workspaces`
-Refer to the Terraform Registory for docs: [`databricks_mws_workspaces`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_workspaces).
+Refer to the Terraform Registry for docs: [`databricks_mws_workspaces`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/mws_workspaces).
diff --git a/src/mws-workspaces/index.ts b/src/mws-workspaces/index.ts
index d38fc1ca1..c77602638 100644
--- a/src/mws-workspaces/index.ts
+++ b/src/mws-workspaces/index.ts
@@ -153,6 +153,25 @@ export function mwsWorkspacesCloudResourceContainerGcpToTerraform(struct?: MwsWo
}
}
+
+export function mwsWorkspacesCloudResourceContainerGcpToHclTerraform(struct?: MwsWorkspacesCloudResourceContainerGcpOutputReference | MwsWorkspacesCloudResourceContainerGcp): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ project_id: {
+ value: cdktf.stringToHclTerraform(struct!.projectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesCloudResourceContainerGcpOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -217,6 +236,25 @@ export function mwsWorkspacesCloudResourceContainerToTerraform(struct?: MwsWorks
}
}
+
+export function mwsWorkspacesCloudResourceContainerToHclTerraform(struct?: MwsWorkspacesCloudResourceContainerOutputReference | MwsWorkspacesCloudResourceContainer): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ gcp: {
+ value: mwsWorkspacesCloudResourceContainerGcpToHclTerraform(struct!.gcp),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesCloudResourceContainerGcpList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesCloudResourceContainerOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -289,6 +327,37 @@ export function mwsWorkspacesExternalCustomerInfoToTerraform(struct?: MwsWorkspa
}
}
+
+export function mwsWorkspacesExternalCustomerInfoToHclTerraform(struct?: MwsWorkspacesExternalCustomerInfoOutputReference | MwsWorkspacesExternalCustomerInfo): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ authoritative_user_email: {
+ value: cdktf.stringToHclTerraform(struct!.authoritativeUserEmail),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ authoritative_user_full_name: {
+ value: cdktf.stringToHclTerraform(struct!.authoritativeUserFullName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ customer_name: {
+ value: cdktf.stringToHclTerraform(struct!.customerName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesExternalCustomerInfoOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -399,6 +468,37 @@ export function mwsWorkspacesGcpManagedNetworkConfigToTerraform(struct?: MwsWork
}
}
+
+export function mwsWorkspacesGcpManagedNetworkConfigToHclTerraform(struct?: MwsWorkspacesGcpManagedNetworkConfigOutputReference | MwsWorkspacesGcpManagedNetworkConfig): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ gke_cluster_pod_ip_range: {
+ value: cdktf.stringToHclTerraform(struct!.gkeClusterPodIpRange),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ gke_cluster_service_ip_range: {
+ value: cdktf.stringToHclTerraform(struct!.gkeClusterServiceIpRange),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ subnet_cidr: {
+ value: cdktf.stringToHclTerraform(struct!.subnetCidr),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesGcpManagedNetworkConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -504,6 +604,31 @@ export function mwsWorkspacesGkeConfigToTerraform(struct?: MwsWorkspacesGkeConfi
}
}
+
+export function mwsWorkspacesGkeConfigToHclTerraform(struct?: MwsWorkspacesGkeConfigOutputReference | MwsWorkspacesGkeConfig): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ connectivity_type: {
+ value: cdktf.stringToHclTerraform(struct!.connectivityType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ master_ip_range: {
+ value: cdktf.stringToHclTerraform(struct!.masterIpRange),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesGkeConfigOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -595,6 +720,37 @@ export function mwsWorkspacesTimeoutsToTerraform(struct?: MwsWorkspacesTimeouts
}
}
+
+export function mwsWorkspacesTimeoutsToHclTerraform(struct?: MwsWorkspacesTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ create: {
+ value: cdktf.stringToHclTerraform(struct!.create),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ read: {
+ value: cdktf.stringToHclTerraform(struct!.read),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ update: {
+ value: cdktf.stringToHclTerraform(struct!.update),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -729,6 +885,43 @@ export function mwsWorkspacesTokenToTerraform(struct?: MwsWorkspacesTokenOutputR
}
}
+
+export function mwsWorkspacesTokenToHclTerraform(struct?: MwsWorkspacesTokenOutputReference | MwsWorkspacesToken): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(struct!.comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ lifetime_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.lifetimeSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ token_id: {
+ value: cdktf.stringToHclTerraform(struct!.tokenId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ token_value: {
+ value: cdktf.stringToHclTerraform(struct!.tokenValue),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class MwsWorkspacesTokenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1389,4 +1582,174 @@ export class MwsWorkspaces extends cdktf.TerraformResource {
token: mwsWorkspacesTokenToTerraform(this._token.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ aws_region: {
+ value: cdktf.stringToHclTerraform(this._awsRegion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cloud: {
+ value: cdktf.stringToHclTerraform(this._cloud),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ credentials_id: {
+ value: cdktf.stringToHclTerraform(this._credentialsId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ customer_managed_key_id: {
+ value: cdktf.stringToHclTerraform(this._customerManagedKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ deployment_name: {
+ value: cdktf.stringToHclTerraform(this._deploymentName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ is_no_public_ip_enabled: {
+ value: cdktf.booleanToHclTerraform(this._isNoPublicIpEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ location: {
+ value: cdktf.stringToHclTerraform(this._location),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ managed_services_customer_managed_key_id: {
+ value: cdktf.stringToHclTerraform(this._managedServicesCustomerManagedKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ network_id: {
+ value: cdktf.stringToHclTerraform(this._networkId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pricing_tier: {
+ value: cdktf.stringToHclTerraform(this._pricingTier),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_access_settings_id: {
+ value: cdktf.stringToHclTerraform(this._privateAccessSettingsId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_configuration_id: {
+ value: cdktf.stringToHclTerraform(this._storageConfigurationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_customer_managed_key_id: {
+ value: cdktf.stringToHclTerraform(this._storageCustomerManagedKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_id: {
+ value: cdktf.numberToHclTerraform(this._workspaceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ workspace_name: {
+ value: cdktf.stringToHclTerraform(this._workspaceName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_status: {
+ value: cdktf.stringToHclTerraform(this._workspaceStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_status_message: {
+ value: cdktf.stringToHclTerraform(this._workspaceStatusMessage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_url: {
+ value: cdktf.stringToHclTerraform(this._workspaceUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cloud_resource_container: {
+ value: mwsWorkspacesCloudResourceContainerToHclTerraform(this._cloudResourceContainer.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesCloudResourceContainerList",
+ },
+ external_customer_info: {
+ value: mwsWorkspacesExternalCustomerInfoToHclTerraform(this._externalCustomerInfo.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesExternalCustomerInfoList",
+ },
+ gcp_managed_network_config: {
+ value: mwsWorkspacesGcpManagedNetworkConfigToHclTerraform(this._gcpManagedNetworkConfig.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesGcpManagedNetworkConfigList",
+ },
+ gke_config: {
+ value: mwsWorkspacesGkeConfigToHclTerraform(this._gkeConfig.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesGkeConfigList",
+ },
+ timeouts: {
+ value: mwsWorkspacesTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "MwsWorkspacesTimeouts",
+ },
+ token: {
+ value: mwsWorkspacesTokenToHclTerraform(this._token.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "MwsWorkspacesTokenList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/notebook/README.md b/src/notebook/README.md
index ced2c33ca..1825cc3b4 100644
--- a/src/notebook/README.md
+++ b/src/notebook/README.md
@@ -1,3 +1,3 @@
# `databricks_notebook`
-Refer to the Terraform Registory for docs: [`databricks_notebook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/notebook).
+Refer to the Terraform Registry for docs: [`databricks_notebook`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/notebook).
diff --git a/src/notebook/index.ts b/src/notebook/index.ts
index d1063137c..fd31e14bc 100644
--- a/src/notebook/index.ts
+++ b/src/notebook/index.ts
@@ -282,4 +282,66 @@ export class Notebook extends cdktf.TerraformResource {
source: cdktf.stringToTerraform(this._source),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ content_base64: {
+ value: cdktf.stringToHclTerraform(this._contentBase64),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ format: {
+ value: cdktf.stringToHclTerraform(this._format),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ language: {
+ value: cdktf.stringToHclTerraform(this._language),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ md5: {
+ value: cdktf.stringToHclTerraform(this._md5),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_id: {
+ value: cdktf.numberToHclTerraform(this._objectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ object_type: {
+ value: cdktf.stringToHclTerraform(this._objectType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(this._source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/obo-token/README.md b/src/obo-token/README.md
index 628cf1c39..703275665 100644
--- a/src/obo-token/README.md
+++ b/src/obo-token/README.md
@@ -1,3 +1,3 @@
# `databricks_obo_token`
-Refer to the Terraform Registory for docs: [`databricks_obo_token`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/obo_token).
+Refer to the Terraform Registry for docs: [`databricks_obo_token`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/obo_token).
diff --git a/src/obo-token/index.ts b/src/obo-token/index.ts
index 4d859e972..59905826a 100644
--- a/src/obo-token/index.ts
+++ b/src/obo-token/index.ts
@@ -172,4 +172,36 @@ export class OboToken extends cdktf.TerraformResource {
lifetime_seconds: cdktf.numberToTerraform(this._lifetimeSeconds),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ application_id: {
+ value: cdktf.stringToHclTerraform(this._applicationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ lifetime_seconds: {
+ value: cdktf.numberToHclTerraform(this._lifetimeSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/permission-assignment/README.md b/src/permission-assignment/README.md
index 7c104e16b..4a32dad78 100644
--- a/src/permission-assignment/README.md
+++ b/src/permission-assignment/README.md
@@ -1,3 +1,3 @@
# `databricks_permission_assignment`
-Refer to the Terraform Registory for docs: [`databricks_permission_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/permission_assignment).
+Refer to the Terraform Registry for docs: [`databricks_permission_assignment`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/permission_assignment).
diff --git a/src/permission-assignment/index.ts b/src/permission-assignment/index.ts
index 9bb5558c1..f1b9e1cfc 100644
--- a/src/permission-assignment/index.ts
+++ b/src/permission-assignment/index.ts
@@ -142,4 +142,30 @@ export class PermissionAssignment extends cdktf.TerraformResource {
principal_id: cdktf.numberToTerraform(this._principalId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ permissions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._permissions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ principal_id: {
+ value: cdktf.numberToHclTerraform(this._principalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/permissions/README.md b/src/permissions/README.md
index 6e41bae5d..8eea5e08d 100644
--- a/src/permissions/README.md
+++ b/src/permissions/README.md
@@ -1,3 +1,3 @@
# `databricks_permissions`
-Refer to the Terraform Registory for docs: [`databricks_permissions`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/permissions).
+Refer to the Terraform Registry for docs: [`databricks_permissions`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/permissions).
diff --git a/src/permissions/index.ts b/src/permissions/index.ts
index da52eb7a4..fb9b40fca 100644
--- a/src/permissions/index.ts
+++ b/src/permissions/index.ts
@@ -146,6 +146,43 @@ export function permissionsAccessControlToTerraform(struct?: PermissionsAccessCo
}
}
+
+export function permissionsAccessControlToHclTerraform(struct?: PermissionsAccessControl | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ group_name: {
+ value: cdktf.stringToHclTerraform(struct!.groupName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ permission_level: {
+ value: cdktf.stringToHclTerraform(struct!.permissionLevel),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_principal_name: {
+ value: cdktf.stringToHclTerraform(struct!.servicePrincipalName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(struct!.userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PermissionsAccessControlOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -784,4 +821,156 @@ export class Permissions extends cdktf.TerraformResource {
access_control: cdktf.listMapper(permissionsAccessControlToTerraform, true)(this._accessControl.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ authorization: {
+ value: cdktf.stringToHclTerraform(this._authorization),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_policy_id: {
+ value: cdktf.stringToHclTerraform(this._clusterPolicyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory_id: {
+ value: cdktf.stringToHclTerraform(this._directoryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory_path: {
+ value: cdktf.stringToHclTerraform(this._directoryPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ experiment_id: {
+ value: cdktf.stringToHclTerraform(this._experimentId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(this._instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ job_id: {
+ value: cdktf.stringToHclTerraform(this._jobId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ notebook_id: {
+ value: cdktf.stringToHclTerraform(this._notebookId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ notebook_path: {
+ value: cdktf.stringToHclTerraform(this._notebookPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_type: {
+ value: cdktf.stringToHclTerraform(this._objectType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ pipeline_id: {
+ value: cdktf.stringToHclTerraform(this._pipelineId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ registered_model_id: {
+ value: cdktf.stringToHclTerraform(this._registeredModelId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo_id: {
+ value: cdktf.stringToHclTerraform(this._repoId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repo_path: {
+ value: cdktf.stringToHclTerraform(this._repoPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ serving_endpoint_id: {
+ value: cdktf.stringToHclTerraform(this._servingEndpointId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sql_alert_id: {
+ value: cdktf.stringToHclTerraform(this._sqlAlertId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sql_dashboard_id: {
+ value: cdktf.stringToHclTerraform(this._sqlDashboardId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sql_endpoint_id: {
+ value: cdktf.stringToHclTerraform(this._sqlEndpointId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sql_query_id: {
+ value: cdktf.stringToHclTerraform(this._sqlQueryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_file_id: {
+ value: cdktf.stringToHclTerraform(this._workspaceFileId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_file_path: {
+ value: cdktf.stringToHclTerraform(this._workspaceFilePath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ access_control: {
+ value: cdktf.listMapperHcl(permissionsAccessControlToHclTerraform, true)(this._accessControl.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "PermissionsAccessControlList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/pipeline/README.md b/src/pipeline/README.md
index f62fb3501..c7120d587 100644
--- a/src/pipeline/README.md
+++ b/src/pipeline/README.md
@@ -1,3 +1,3 @@
# `databricks_pipeline`
-Refer to the Terraform Registory for docs: [`databricks_pipeline`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/pipeline).
+Refer to the Terraform Registry for docs: [`databricks_pipeline`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/pipeline).
diff --git a/src/pipeline/index.ts b/src/pipeline/index.ts
index 71b816187..196f288d7 100644
--- a/src/pipeline/index.ts
+++ b/src/pipeline/index.ts
@@ -125,6 +125,37 @@ export function pipelineClusterAutoscaleToTerraform(struct?: PipelineClusterAuto
}
}
+
+export function pipelineClusterAutoscaleToHclTerraform(struct?: PipelineClusterAutoscaleOutputReference | PipelineClusterAutoscale): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ max_workers: {
+ value: cdktf.numberToHclTerraform(struct!.maxWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_workers: {
+ value: cdktf.numberToHclTerraform(struct!.minWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ mode: {
+ value: cdktf.stringToHclTerraform(struct!.mode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterAutoscaleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -269,6 +300,67 @@ export function pipelineClusterAwsAttributesToTerraform(struct?: PipelineCluster
}
}
+
+export function pipelineClusterAwsAttributesToHclTerraform(struct?: PipelineClusterAwsAttributesOutputReference | PipelineClusterAwsAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ebs_volume_count: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_size: {
+ value: cdktf.numberToHclTerraform(struct!.ebsVolumeSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ ebs_volume_type: {
+ value: cdktf.stringToHclTerraform(struct!.ebsVolumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(struct!.instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spot_bid_price_percent: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidPricePercent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterAwsAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -498,6 +590,37 @@ export function pipelineClusterAzureAttributesToTerraform(struct?: PipelineClust
}
}
+
+export function pipelineClusterAzureAttributesToHclTerraform(struct?: PipelineClusterAzureAttributesOutputReference | PipelineClusterAzureAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ first_on_demand: {
+ value: cdktf.numberToHclTerraform(struct!.firstOnDemand),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_bid_max_price: {
+ value: cdktf.numberToHclTerraform(struct!.spotBidMaxPrice),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterAzureAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -607,6 +730,25 @@ export function pipelineClusterClusterLogConfDbfsToTerraform(struct?: PipelineCl
}
}
+
+export function pipelineClusterClusterLogConfDbfsToHclTerraform(struct?: PipelineClusterClusterLogConfDbfsOutputReference | PipelineClusterClusterLogConfDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterClusterLogConfDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -699,6 +841,61 @@ export function pipelineClusterClusterLogConfS3ToTerraform(struct?: PipelineClus
}
}
+
+export function pipelineClusterClusterLogConfS3ToHclTerraform(struct?: PipelineClusterClusterLogConfS3OutputReference | PipelineClusterClusterLogConfS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterClusterLogConfS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -902,6 +1099,31 @@ export function pipelineClusterClusterLogConfToTerraform(struct?: PipelineCluste
}
}
+
+export function pipelineClusterClusterLogConfToHclTerraform(struct?: PipelineClusterClusterLogConfOutputReference | PipelineClusterClusterLogConf): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dbfs: {
+ value: pipelineClusterClusterLogConfDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterClusterLogConfDbfsList",
+ },
+ s3: {
+ value: pipelineClusterClusterLogConfS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterClusterLogConfS3List",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterClusterLogConfOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1004,6 +1226,43 @@ export function pipelineClusterGcpAttributesToTerraform(struct?: PipelineCluster
}
}
+
+export function pipelineClusterGcpAttributesToHclTerraform(struct?: PipelineClusterGcpAttributesOutputReference | PipelineClusterGcpAttributes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ availability: {
+ value: cdktf.stringToHclTerraform(struct!.availability),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(struct!.googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ local_ssd_count: {
+ value: cdktf.numberToHclTerraform(struct!.localSsdCount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ zone_id: {
+ value: cdktf.stringToHclTerraform(struct!.zoneId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterGcpAttributesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1135,6 +1394,25 @@ export function pipelineClusterInitScriptsAbfssToTerraform(struct?: PipelineClus
}
}
+
+export function pipelineClusterInitScriptsAbfssToHclTerraform(struct?: PipelineClusterInitScriptsAbfssOutputReference | PipelineClusterInitScriptsAbfss): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsAbfssOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1200,6 +1478,25 @@ export function pipelineClusterInitScriptsDbfsToTerraform(struct?: PipelineClust
}
}
+
+export function pipelineClusterInitScriptsDbfsToHclTerraform(struct?: PipelineClusterInitScriptsDbfsOutputReference | PipelineClusterInitScriptsDbfs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsDbfsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1262,6 +1559,25 @@ export function pipelineClusterInitScriptsFileToTerraform(struct?: PipelineClust
}
}
+
+export function pipelineClusterInitScriptsFileToHclTerraform(struct?: PipelineClusterInitScriptsFileOutputReference | PipelineClusterInitScriptsFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1327,6 +1643,25 @@ export function pipelineClusterInitScriptsGcsToTerraform(struct?: PipelineCluste
}
}
+
+export function pipelineClusterInitScriptsGcsToHclTerraform(struct?: PipelineClusterInitScriptsGcsOutputReference | PipelineClusterInitScriptsGcs): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsGcsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1422,6 +1757,61 @@ export function pipelineClusterInitScriptsS3ToTerraform(struct?: PipelineCluster
}
}
+
+export function pipelineClusterInitScriptsS3ToHclTerraform(struct?: PipelineClusterInitScriptsS3OutputReference | PipelineClusterInitScriptsS3): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ canned_acl: {
+ value: cdktf.stringToHclTerraform(struct!.cannedAcl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ encryption_type: {
+ value: cdktf.stringToHclTerraform(struct!.encryptionType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ endpoint: {
+ value: cdktf.stringToHclTerraform(struct!.endpoint),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ kms_key: {
+ value: cdktf.stringToHclTerraform(struct!.kmsKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ region: {
+ value: cdktf.stringToHclTerraform(struct!.region),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsS3OutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1616,6 +2006,25 @@ export function pipelineClusterInitScriptsVolumesToTerraform(struct?: PipelineCl
}
}
+
+export function pipelineClusterInitScriptsVolumesToHclTerraform(struct?: PipelineClusterInitScriptsVolumesOutputReference | PipelineClusterInitScriptsVolumes): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsVolumesOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1681,6 +2090,25 @@ export function pipelineClusterInitScriptsWorkspaceToTerraform(struct?: Pipeline
}
}
+
+export function pipelineClusterInitScriptsWorkspaceToHclTerraform(struct?: PipelineClusterInitScriptsWorkspaceOutputReference | PipelineClusterInitScriptsWorkspace): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ destination: {
+ value: cdktf.stringToHclTerraform(struct!.destination),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsWorkspaceOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1790,6 +2218,61 @@ export function pipelineClusterInitScriptsToTerraform(struct?: PipelineClusterIn
}
}
+
+export function pipelineClusterInitScriptsToHclTerraform(struct?: PipelineClusterInitScripts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ abfss: {
+ value: pipelineClusterInitScriptsAbfssToHclTerraform(struct!.abfss),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsAbfssList",
+ },
+ dbfs: {
+ value: pipelineClusterInitScriptsDbfsToHclTerraform(struct!.dbfs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsDbfsList",
+ },
+ file: {
+ value: pipelineClusterInitScriptsFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsFileList",
+ },
+ gcs: {
+ value: pipelineClusterInitScriptsGcsToHclTerraform(struct!.gcs),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsGcsList",
+ },
+ s3: {
+ value: pipelineClusterInitScriptsS3ToHclTerraform(struct!.s3),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsS3List",
+ },
+ volumes: {
+ value: pipelineClusterInitScriptsVolumesToHclTerraform(struct!.volumes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsVolumesList",
+ },
+ workspace: {
+ value: pipelineClusterInitScriptsWorkspaceToHclTerraform(struct!.workspace),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsWorkspaceList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterInitScriptsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2121,6 +2604,133 @@ export function pipelineClusterToTerraform(struct?: PipelineCluster | cdktf.IRes
}
}
+
+export function pipelineClusterToHclTerraform(struct?: PipelineCluster | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ apply_policy_default_values: {
+ value: cdktf.booleanToHclTerraform(struct!.applyPolicyDefaultValues),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ custom_tags: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.customTags),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ driver_instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverInstancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ driver_node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.driverNodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_local_disk_encryption: {
+ value: cdktf.booleanToHclTerraform(struct!.enableLocalDiskEncryption),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ instance_pool_id: {
+ value: cdktf.stringToHclTerraform(struct!.instancePoolId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ label: {
+ value: cdktf.stringToHclTerraform(struct!.label),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ node_type_id: {
+ value: cdktf.stringToHclTerraform(struct!.nodeTypeId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_workers: {
+ value: cdktf.numberToHclTerraform(struct!.numWorkers),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ policy_id: {
+ value: cdktf.stringToHclTerraform(struct!.policyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ spark_conf: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkConf),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ spark_env_vars: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(struct!.sparkEnvVars),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ ssh_public_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.sshPublicKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ autoscale: {
+ value: pipelineClusterAutoscaleToHclTerraform(struct!.autoscale),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterAutoscaleList",
+ },
+ aws_attributes: {
+ value: pipelineClusterAwsAttributesToHclTerraform(struct!.awsAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterAwsAttributesList",
+ },
+ azure_attributes: {
+ value: pipelineClusterAzureAttributesToHclTerraform(struct!.azureAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterAzureAttributesList",
+ },
+ cluster_log_conf: {
+ value: pipelineClusterClusterLogConfToHclTerraform(struct!.clusterLogConf),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterClusterLogConfList",
+ },
+ gcp_attributes: {
+ value: pipelineClusterGcpAttributesToHclTerraform(struct!.gcpAttributes),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterGcpAttributesList",
+ },
+ init_scripts: {
+ value: cdktf.listMapperHcl(pipelineClusterInitScriptsToHclTerraform, true)(struct!.initScripts),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterInitScriptsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineClusterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -2619,6 +3229,31 @@ export function pipelineFiltersToTerraform(struct?: PipelineFiltersOutputReferen
}
}
+
+export function pipelineFiltersToHclTerraform(struct?: PipelineFiltersOutputReference | PipelineFilters): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ exclude: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclude),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ include: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.include),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineFiltersOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2706,6 +3341,25 @@ export function pipelineLibraryFileToTerraform(struct?: PipelineLibraryFileOutpu
}
}
+
+export function pipelineLibraryFileToHclTerraform(struct?: PipelineLibraryFileOutputReference | PipelineLibraryFile): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineLibraryFileOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2778,6 +3432,37 @@ export function pipelineLibraryMavenToTerraform(struct?: PipelineLibraryMavenOut
}
}
+
+export function pipelineLibraryMavenToHclTerraform(struct?: PipelineLibraryMavenOutputReference | PipelineLibraryMaven): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ coordinates: {
+ value: cdktf.stringToHclTerraform(struct!.coordinates),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ exclusions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.exclusions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ repo: {
+ value: cdktf.stringToHclTerraform(struct!.repo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineLibraryMavenOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2884,6 +3569,25 @@ export function pipelineLibraryNotebookToTerraform(struct?: PipelineLibraryNoteb
}
}
+
+export function pipelineLibraryNotebookToHclTerraform(struct?: PipelineLibraryNotebookOutputReference | PipelineLibraryNotebook): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineLibraryNotebookOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2972,6 +3676,49 @@ export function pipelineLibraryToTerraform(struct?: PipelineLibrary | cdktf.IRes
}
}
+
+export function pipelineLibraryToHclTerraform(struct?: PipelineLibrary | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ jar: {
+ value: cdktf.stringToHclTerraform(struct!.jar),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ whl: {
+ value: cdktf.stringToHclTerraform(struct!.whl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ file: {
+ value: pipelineLibraryFileToHclTerraform(struct!.file),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineLibraryFileList",
+ },
+ maven: {
+ value: pipelineLibraryMavenToHclTerraform(struct!.maven),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineLibraryMavenList",
+ },
+ notebook: {
+ value: pipelineLibraryNotebookToHclTerraform(struct!.notebook),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineLibraryNotebookList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineLibraryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3162,6 +3909,31 @@ export function pipelineNotificationToTerraform(struct?: PipelineNotification |
}
}
+
+export function pipelineNotificationToHclTerraform(struct?: PipelineNotification | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ alerts: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.alerts),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ email_recipients: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.emailRecipients),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineNotificationOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3275,6 +4047,25 @@ export function pipelineTimeoutsToTerraform(struct?: PipelineTimeouts | cdktf.IR
}
}
+
+export function pipelineTimeoutsToHclTerraform(struct?: PipelineTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ default: {
+ value: cdktf.stringToHclTerraform(struct!.default),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class PipelineTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -3728,4 +4519,120 @@ export class Pipeline extends cdktf.TerraformResource {
timeouts: pipelineTimeoutsToTerraform(this._timeouts.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ allow_duplicate_names: {
+ value: cdktf.booleanToHclTerraform(this._allowDuplicateNames),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ catalog: {
+ value: cdktf.stringToHclTerraform(this._catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ channel: {
+ value: cdktf.stringToHclTerraform(this._channel),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ configuration: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._configuration),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ continuous: {
+ value: cdktf.booleanToHclTerraform(this._continuous),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ development: {
+ value: cdktf.booleanToHclTerraform(this._development),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ edition: {
+ value: cdktf.stringToHclTerraform(this._edition),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ photon: {
+ value: cdktf.booleanToHclTerraform(this._photon),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ serverless: {
+ value: cdktf.booleanToHclTerraform(this._serverless),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ storage: {
+ value: cdktf.stringToHclTerraform(this._storage),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ target: {
+ value: cdktf.stringToHclTerraform(this._target),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster: {
+ value: cdktf.listMapperHcl(pipelineClusterToHclTerraform, true)(this._cluster.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineClusterList",
+ },
+ filters: {
+ value: pipelineFiltersToHclTerraform(this._filters.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineFiltersList",
+ },
+ library: {
+ value: cdktf.listMapperHcl(pipelineLibraryToHclTerraform, true)(this._library.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "PipelineLibraryList",
+ },
+ notification: {
+ value: cdktf.listMapperHcl(pipelineNotificationToHclTerraform, true)(this._notification.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "PipelineNotificationList",
+ },
+ timeouts: {
+ value: pipelineTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "PipelineTimeouts",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/provider-resource/README.md b/src/provider-resource/README.md
index 76e513839..dc3518e20 100644
--- a/src/provider-resource/README.md
+++ b/src/provider-resource/README.md
@@ -1,3 +1,3 @@
# `databricks_provider`
-Refer to the Terraform Registory for docs: [`databricks_provider`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/provider).
+Refer to the Terraform Registry for docs: [`databricks_provider`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/provider).
diff --git a/src/provider-resource/index.ts b/src/provider-resource/index.ts
index 86d501383..05e8e3d43 100644
--- a/src/provider-resource/index.ts
+++ b/src/provider-resource/index.ts
@@ -183,4 +183,42 @@ export class ProviderResource extends cdktf.TerraformResource {
recipient_profile_str: cdktf.stringToTerraform(this._recipientProfileStr),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ authentication_type: {
+ value: cdktf.stringToHclTerraform(this._authenticationType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ recipient_profile_str: {
+ value: cdktf.stringToHclTerraform(this._recipientProfileStr),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/provider/README.md b/src/provider/README.md
index 10c90a316..a3243d068 100644
--- a/src/provider/README.md
+++ b/src/provider/README.md
@@ -1,3 +1,3 @@
# `provider`
-Refer to the Terraform Registory for docs: [`databricks`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs).
+Refer to the Terraform Registry for docs: [`databricks`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs).
diff --git a/src/provider/index.ts b/src/provider/index.ts
index c4b21c123..fbc6f6136 100644
--- a/src/provider/index.ts
+++ b/src/provider/index.ts
@@ -735,4 +735,192 @@ export class DatabricksProvider extends cdktf.TerraformProvider {
alias: cdktf.stringToTerraform(this._alias),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ account_id: {
+ value: cdktf.stringToHclTerraform(this._accountId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ auth_type: {
+ value: cdktf.stringToHclTerraform(this._authType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_client_id: {
+ value: cdktf.stringToHclTerraform(this._azureClientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_client_secret: {
+ value: cdktf.stringToHclTerraform(this._azureClientSecret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_environment: {
+ value: cdktf.stringToHclTerraform(this._azureEnvironment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_login_app_id: {
+ value: cdktf.stringToHclTerraform(this._azureLoginAppId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_tenant_id: {
+ value: cdktf.stringToHclTerraform(this._azureTenantId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ azure_use_msi: {
+ value: cdktf.booleanToHclTerraform(this._azureUseMsi),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ azure_workspace_resource_id: {
+ value: cdktf.stringToHclTerraform(this._azureWorkspaceResourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_id: {
+ value: cdktf.stringToHclTerraform(this._clientId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret: {
+ value: cdktf.stringToHclTerraform(this._clientSecret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ config_file: {
+ value: cdktf.stringToHclTerraform(this._configFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ databricks_cli_path: {
+ value: cdktf.stringToHclTerraform(this._databricksCliPath),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ debug_headers: {
+ value: cdktf.booleanToHclTerraform(this._debugHeaders),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ debug_truncate_bytes: {
+ value: cdktf.numberToHclTerraform(this._debugTruncateBytes),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ google_credentials: {
+ value: cdktf.stringToHclTerraform(this._googleCredentials),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(this._googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ host: {
+ value: cdktf.stringToHclTerraform(this._host),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ http_timeout_seconds: {
+ value: cdktf.numberToHclTerraform(this._httpTimeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ metadata_service_url: {
+ value: cdktf.stringToHclTerraform(this._metadataServiceUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ password: {
+ value: cdktf.stringToHclTerraform(this._password),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ profile: {
+ value: cdktf.stringToHclTerraform(this._profile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ rate_limit: {
+ value: cdktf.numberToHclTerraform(this._rateLimit),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ retry_timeout_seconds: {
+ value: cdktf.numberToHclTerraform(this._retryTimeoutSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ skip_verify: {
+ value: cdktf.booleanToHclTerraform(this._skipVerify),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ token: {
+ value: cdktf.stringToHclTerraform(this._token),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ username: {
+ value: cdktf.stringToHclTerraform(this._username),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(this._warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ alias: {
+ value: cdktf.stringToHclTerraform(this._alias),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/recipient/README.md b/src/recipient/README.md
index 2140f068b..59e0f4a40 100644
--- a/src/recipient/README.md
+++ b/src/recipient/README.md
@@ -1,3 +1,3 @@
# `databricks_recipient`
-Refer to the Terraform Registory for docs: [`databricks_recipient`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/recipient).
+Refer to the Terraform Registry for docs: [`databricks_recipient`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/recipient).
diff --git a/src/recipient/index.ts b/src/recipient/index.ts
index 0e6d99eb8..8229459df 100644
--- a/src/recipient/index.ts
+++ b/src/recipient/index.ts
@@ -73,6 +73,25 @@ export function recipientIpAccessListStructToTerraform(struct?: RecipientIpAcces
}
}
+
+export function recipientIpAccessListStructToHclTerraform(struct?: RecipientIpAccessListStructOutputReference | RecipientIpAccessListStruct): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ allowed_ip_addresses: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.allowedIpAddresses),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class RecipientIpAccessListStructOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -168,6 +187,61 @@ export function recipientTokensToTerraform(struct?: RecipientTokens | cdktf.IRes
}
}
+
+export function recipientTokensToHclTerraform(struct?: RecipientTokens | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ activation_url: {
+ value: cdktf.stringToHclTerraform(struct!.activationUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ created_at: {
+ value: cdktf.numberToHclTerraform(struct!.createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(struct!.createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ expiration_time: {
+ value: cdktf.numberToHclTerraform(struct!.expirationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(struct!.id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ updated_at: {
+ value: cdktf.numberToHclTerraform(struct!.updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ updated_by: {
+ value: cdktf.stringToHclTerraform(struct!.updatedBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class RecipientTokensOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -602,4 +676,66 @@ export class Recipient extends cdktf.TerraformResource {
tokens: cdktf.listMapper(recipientTokensToTerraform, true)(this._tokens.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ authentication_type: {
+ value: cdktf.stringToHclTerraform(this._authenticationType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_recipient_global_metastore_id: {
+ value: cdktf.stringToHclTerraform(this._dataRecipientGlobalMetastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sharing_code: {
+ value: cdktf.stringToHclTerraform(this._sharingCode),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ ip_access_list: {
+ value: recipientIpAccessListStructToHclTerraform(this._ipAccessList.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "RecipientIpAccessListStructList",
+ },
+ tokens: {
+ value: cdktf.listMapperHcl(recipientTokensToHclTerraform, true)(this._tokens.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "RecipientTokensList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/registered-model/README.md b/src/registered-model/README.md
index 75f2be136..e2c6d6995 100644
--- a/src/registered-model/README.md
+++ b/src/registered-model/README.md
@@ -1,3 +1,3 @@
# `databricks_registered_model`
-Refer to the Terraform Registory for docs: [`databricks_registered_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/registered_model).
+Refer to the Terraform Registry for docs: [`databricks_registered_model`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/registered_model).
diff --git a/src/registered-model/index.ts b/src/registered-model/index.ts
index 1e63edb83..96bef18cd 100644
--- a/src/registered-model/index.ts
+++ b/src/registered-model/index.ts
@@ -205,4 +205,48 @@ export class RegisteredModel extends cdktf.TerraformResource {
storage_location: cdktf.stringToTerraform(this._storageLocation),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_location: {
+ value: cdktf.stringToHclTerraform(this._storageLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/repo/README.md b/src/repo/README.md
index 89058bb9d..0342d6b50 100644
--- a/src/repo/README.md
+++ b/src/repo/README.md
@@ -1,3 +1,3 @@
# `databricks_repo`
-Refer to the Terraform Registory for docs: [`databricks_repo`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/repo).
+Refer to the Terraform Registry for docs: [`databricks_repo`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/repo).
diff --git a/src/repo/index.ts b/src/repo/index.ts
index 97eea871f..21bd7dcce 100644
--- a/src/repo/index.ts
+++ b/src/repo/index.ts
@@ -67,6 +67,25 @@ export function repoSparseCheckoutToTerraform(struct?: RepoSparseCheckoutOutputR
}
}
+
+export function repoSparseCheckoutToHclTerraform(struct?: RepoSparseCheckoutOutputReference | RepoSparseCheckout): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ patterns: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.patterns),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class RepoSparseCheckoutOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -319,4 +338,60 @@ export class Repo extends cdktf.TerraformResource {
sparse_checkout: repoSparseCheckoutToTerraform(this._sparseCheckout.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ branch: {
+ value: cdktf.stringToHclTerraform(this._branch),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ commit_hash: {
+ value: cdktf.stringToHclTerraform(this._commitHash),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ git_provider: {
+ value: cdktf.stringToHclTerraform(this._gitProvider),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tag: {
+ value: cdktf.stringToHclTerraform(this._tag),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ url: {
+ value: cdktf.stringToHclTerraform(this._url),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sparse_checkout: {
+ value: repoSparseCheckoutToHclTerraform(this._sparseCheckout.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "RepoSparseCheckoutList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/schema/README.md b/src/schema/README.md
index 4433671d5..16f79b2f8 100644
--- a/src/schema/README.md
+++ b/src/schema/README.md
@@ -1,3 +1,3 @@
# `databricks_schema`
-Refer to the Terraform Registory for docs: [`databricks_schema`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/schema).
+Refer to the Terraform Registry for docs: [`databricks_schema`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/schema).
diff --git a/src/schema/index.ts b/src/schema/index.ts
index c56d503dd..aed0e96a1 100644
--- a/src/schema/index.ts
+++ b/src/schema/index.ts
@@ -274,4 +274,66 @@ export class Schema extends cdktf.TerraformResource {
storage_root: cdktf.stringToTerraform(this._storageRoot),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ properties: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._properties),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ storage_root: {
+ value: cdktf.stringToHclTerraform(this._storageRoot),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/secret-acl/README.md b/src/secret-acl/README.md
index 92458bb26..fffeb5fdb 100644
--- a/src/secret-acl/README.md
+++ b/src/secret-acl/README.md
@@ -1,3 +1,3 @@
# `databricks_secret_acl`
-Refer to the Terraform Registory for docs: [`databricks_secret_acl`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret_acl).
+Refer to the Terraform Registry for docs: [`databricks_secret_acl`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret_acl).
diff --git a/src/secret-acl/index.ts b/src/secret-acl/index.ts
index 3492b8dd9..7499bffe4 100644
--- a/src/secret-acl/index.ts
+++ b/src/secret-acl/index.ts
@@ -161,4 +161,36 @@ export class SecretAcl extends cdktf.TerraformResource {
scope: cdktf.stringToTerraform(this._scope),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ permission: {
+ value: cdktf.stringToHclTerraform(this._permission),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ principal: {
+ value: cdktf.stringToHclTerraform(this._principal),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ scope: {
+ value: cdktf.stringToHclTerraform(this._scope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/secret-scope/README.md b/src/secret-scope/README.md
index 488f253e4..17f327b4b 100644
--- a/src/secret-scope/README.md
+++ b/src/secret-scope/README.md
@@ -1,3 +1,3 @@
# `databricks_secret_scope`
-Refer to the Terraform Registory for docs: [`databricks_secret_scope`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret_scope).
+Refer to the Terraform Registry for docs: [`databricks_secret_scope`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret_scope).
diff --git a/src/secret-scope/index.ts b/src/secret-scope/index.ts
index 951b10cd3..01ffd113e 100644
--- a/src/secret-scope/index.ts
+++ b/src/secret-scope/index.ts
@@ -60,6 +60,31 @@ export function secretScopeKeyvaultMetadataToTerraform(struct?: SecretScopeKeyva
}
}
+
+export function secretScopeKeyvaultMetadataToHclTerraform(struct?: SecretScopeKeyvaultMetadataOutputReference | SecretScopeKeyvaultMetadata): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ dns_name: {
+ value: cdktf.stringToHclTerraform(struct!.dnsName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ resource_id: {
+ value: cdktf.stringToHclTerraform(struct!.resourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SecretScopeKeyvaultMetadataOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -277,4 +302,42 @@ export class SecretScope extends cdktf.TerraformResource {
keyvault_metadata: secretScopeKeyvaultMetadataToTerraform(this._keyvaultMetadata.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ backend_type: {
+ value: cdktf.stringToHclTerraform(this._backendType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ initial_manage_principal: {
+ value: cdktf.stringToHclTerraform(this._initialManagePrincipal),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ keyvault_metadata: {
+ value: secretScopeKeyvaultMetadataToHclTerraform(this._keyvaultMetadata.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SecretScopeKeyvaultMetadataList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/secret/README.md b/src/secret/README.md
index 98451fb93..0d87e6315 100644
--- a/src/secret/README.md
+++ b/src/secret/README.md
@@ -1,3 +1,3 @@
# `databricks_secret`
-Refer to the Terraform Registory for docs: [`databricks_secret`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret).
+Refer to the Terraform Registry for docs: [`databricks_secret`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/secret).
diff --git a/src/secret/index.ts b/src/secret/index.ts
index 37ec1d6cc..f20666077 100644
--- a/src/secret/index.ts
+++ b/src/secret/index.ts
@@ -171,4 +171,36 @@ export class Secret extends cdktf.TerraformResource {
string_value: cdktf.stringToTerraform(this._stringValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ key: {
+ value: cdktf.stringToHclTerraform(this._key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ scope: {
+ value: cdktf.stringToHclTerraform(this._scope),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ string_value: {
+ value: cdktf.stringToHclTerraform(this._stringValue),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/service-principal-role/README.md b/src/service-principal-role/README.md
index a427c2b81..ac0a84b95 100644
--- a/src/service-principal-role/README.md
+++ b/src/service-principal-role/README.md
@@ -1,3 +1,3 @@
# `databricks_service_principal_role`
-Refer to the Terraform Registory for docs: [`databricks_service_principal_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal_role).
+Refer to the Terraform Registry for docs: [`databricks_service_principal_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal_role).
diff --git a/src/service-principal-role/index.ts b/src/service-principal-role/index.ts
index 26c9c5eb0..2b0f5289b 100644
--- a/src/service-principal-role/index.ts
+++ b/src/service-principal-role/index.ts
@@ -142,4 +142,30 @@ export class ServicePrincipalRole extends cdktf.TerraformResource {
service_principal_id: cdktf.stringToTerraform(this._servicePrincipalId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role: {
+ value: cdktf.stringToHclTerraform(this._role),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_principal_id: {
+ value: cdktf.stringToHclTerraform(this._servicePrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/service-principal-secret/README.md b/src/service-principal-secret/README.md
index f31869fd5..913da65a7 100644
--- a/src/service-principal-secret/README.md
+++ b/src/service-principal-secret/README.md
@@ -1,3 +1,3 @@
# `databricks_service_principal_secret`
-Refer to the Terraform Registory for docs: [`databricks_service_principal_secret`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal_secret).
+Refer to the Terraform Registry for docs: [`databricks_service_principal_secret`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal_secret).
diff --git a/src/service-principal-secret/index.ts b/src/service-principal-secret/index.ts
index 00bc49bd4..f7e357cc6 100644
--- a/src/service-principal-secret/index.ts
+++ b/src/service-principal-secret/index.ts
@@ -167,4 +167,36 @@ export class ServicePrincipalSecret extends cdktf.TerraformResource {
status: cdktf.stringToTerraform(this._status),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ secret: {
+ value: cdktf.stringToHclTerraform(this._secret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ service_principal_id: {
+ value: cdktf.stringToHclTerraform(this._servicePrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(this._status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/service-principal/README.md b/src/service-principal/README.md
index bcf5b278d..40f7ec820 100644
--- a/src/service-principal/README.md
+++ b/src/service-principal/README.md
@@ -1,3 +1,3 @@
# `databricks_service_principal`
-Refer to the Terraform Registory for docs: [`databricks_service_principal`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal).
+Refer to the Terraform Registry for docs: [`databricks_service_principal`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/service_principal).
diff --git a/src/service-principal/index.ts b/src/service-principal/index.ts
index 166adb420..96799c611 100644
--- a/src/service-principal/index.ts
+++ b/src/service-principal/index.ts
@@ -434,4 +434,108 @@ export class ServicePrincipal extends cdktf.TerraformResource {
workspace_access: cdktf.booleanToTerraform(this._workspaceAccess),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ acl_principal_id: {
+ value: cdktf.stringToHclTerraform(this._aclPrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ active: {
+ value: cdktf.booleanToHclTerraform(this._active),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_cluster_create: {
+ value: cdktf.booleanToHclTerraform(this._allowClusterCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_instance_pool_create: {
+ value: cdktf.booleanToHclTerraform(this._allowInstancePoolCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ application_id: {
+ value: cdktf.stringToHclTerraform(this._applicationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ databricks_sql_access: {
+ value: cdktf.booleanToHclTerraform(this._databricksSqlAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ disable_as_user_deletion: {
+ value: cdktf.booleanToHclTerraform(this._disableAsUserDeletion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ display_name: {
+ value: cdktf.stringToHclTerraform(this._displayName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force: {
+ value: cdktf.booleanToHclTerraform(this._force),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_delete_home_dir: {
+ value: cdktf.booleanToHclTerraform(this._forceDeleteHomeDir),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_delete_repos: {
+ value: cdktf.booleanToHclTerraform(this._forceDeleteRepos),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ home: {
+ value: cdktf.stringToHclTerraform(this._home),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repos: {
+ value: cdktf.stringToHclTerraform(this._repos),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_access: {
+ value: cdktf.booleanToHclTerraform(this._workspaceAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/share/README.md b/src/share/README.md
index 21ade1d62..c0add2b0e 100644
--- a/src/share/README.md
+++ b/src/share/README.md
@@ -1,3 +1,3 @@
# `databricks_share`
-Refer to the Terraform Registory for docs: [`databricks_share`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/share).
+Refer to the Terraform Registry for docs: [`databricks_share`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/share).
diff --git a/src/share/index.ts b/src/share/index.ts
index deb669a16..e92b82eef 100644
--- a/src/share/index.ts
+++ b/src/share/index.ts
@@ -74,6 +74,43 @@ export function shareObjectPartitionValueToTerraform(struct?: ShareObjectPartiti
}
}
+
+export function shareObjectPartitionValueToHclTerraform(struct?: ShareObjectPartitionValue | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ recipient_property_key: {
+ value: cdktf.stringToHclTerraform(struct!.recipientPropertyKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ShareObjectPartitionValueOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -233,6 +270,25 @@ export function shareObjectPartitionToTerraform(struct?: ShareObjectPartition |
}
}
+
+export function shareObjectPartitionToHclTerraform(struct?: ShareObjectPartition | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.listMapperHcl(shareObjectPartitionValueToHclTerraform, true)(struct!.value),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ShareObjectPartitionValueList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ShareObjectPartitionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -379,6 +435,85 @@ export function shareObjectToTerraform(struct?: ShareObject | cdktf.IResolvable)
}
}
+
+export function shareObjectToHclTerraform(struct?: ShareObject | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ added_at: {
+ value: cdktf.numberToHclTerraform(struct!.addedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ added_by: {
+ value: cdktf.stringToHclTerraform(struct!.addedBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cdf_enabled: {
+ value: cdktf.booleanToHclTerraform(struct!.cdfEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(struct!.comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_object_type: {
+ value: cdktf.stringToHclTerraform(struct!.dataObjectType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ history_data_sharing_status: {
+ value: cdktf.stringToHclTerraform(struct!.historyDataSharingStatus),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ shared_as: {
+ value: cdktf.stringToHclTerraform(struct!.sharedAs),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start_version: {
+ value: cdktf.numberToHclTerraform(struct!.startVersion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ status: {
+ value: cdktf.stringToHclTerraform(struct!.status),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ partition: {
+ value: cdktf.listMapperHcl(shareObjectPartitionToHclTerraform, true)(struct!.partition),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ShareObjectPartitionList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class ShareObjectOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -844,4 +979,48 @@ export class Share extends cdktf.TerraformResource {
object: cdktf.listMapper(shareObjectToTerraform, true)(this._object.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ created_at: {
+ value: cdktf.numberToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ created_by: {
+ value: cdktf.stringToHclTerraform(this._createdBy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object: {
+ value: cdktf.listMapperHcl(shareObjectToHclTerraform, true)(this._object.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "ShareObjectList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/sql-alert/README.md b/src/sql-alert/README.md
index 30c2dbf07..2cacae6fe 100644
--- a/src/sql-alert/README.md
+++ b/src/sql-alert/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_alert`
-Refer to the Terraform Registory for docs: [`databricks_sql_alert`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_alert).
+Refer to the Terraform Registry for docs: [`databricks_sql_alert`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_alert).
diff --git a/src/sql-alert/index.ts b/src/sql-alert/index.ts
index 6b8249d02..6f8121c2d 100644
--- a/src/sql-alert/index.ts
+++ b/src/sql-alert/index.ts
@@ -97,6 +97,61 @@ export function sqlAlertOptionsToTerraform(struct?: SqlAlertOptionsOutputReferen
}
}
+
+export function sqlAlertOptionsToHclTerraform(struct?: SqlAlertOptionsOutputReference | SqlAlertOptions): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ column: {
+ value: cdktf.stringToHclTerraform(struct!.column),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_body: {
+ value: cdktf.stringToHclTerraform(struct!.customBody),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ custom_subject: {
+ value: cdktf.stringToHclTerraform(struct!.customSubject),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ empty_result_state: {
+ value: cdktf.stringToHclTerraform(struct!.emptyResultState),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ muted: {
+ value: cdktf.booleanToHclTerraform(struct!.muted),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ op: {
+ value: cdktf.stringToHclTerraform(struct!.op),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlAlertOptionsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -469,4 +524,60 @@ export class SqlAlert extends cdktf.TerraformResource {
options: sqlAlertOptionsToTerraform(this._options.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ created_at: {
+ value: cdktf.stringToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parent: {
+ value: cdktf.stringToHclTerraform(this._parent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ query_id: {
+ value: cdktf.stringToHclTerraform(this._queryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ rearm: {
+ value: cdktf.numberToHclTerraform(this._rearm),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ updated_at: {
+ value: cdktf.stringToHclTerraform(this._updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ options: {
+ value: sqlAlertOptionsToHclTerraform(this._options.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlAlertOptionsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/sql-dashboard/README.md b/src/sql-dashboard/README.md
index ad4a62759..d20ce478c 100644
--- a/src/sql-dashboard/README.md
+++ b/src/sql-dashboard/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_dashboard`
-Refer to the Terraform Registory for docs: [`databricks_sql_dashboard`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_dashboard).
+Refer to the Terraform Registry for docs: [`databricks_sql_dashboard`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_dashboard).
diff --git a/src/sql-dashboard/index.ts b/src/sql-dashboard/index.ts
index 4d47ce3b4..20ead2a6a 100644
--- a/src/sql-dashboard/index.ts
+++ b/src/sql-dashboard/index.ts
@@ -255,4 +255,60 @@ export class SqlDashboard extends cdktf.TerraformResource {
updated_at: cdktf.stringToTerraform(this._updatedAt),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ created_at: {
+ value: cdktf.stringToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ dashboard_filters_enabled: {
+ value: cdktf.booleanToHclTerraform(this._dashboardFiltersEnabled),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parent: {
+ value: cdktf.stringToHclTerraform(this._parent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ run_as_role: {
+ value: cdktf.stringToHclTerraform(this._runAsRole),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._tags),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ updated_at: {
+ value: cdktf.stringToHclTerraform(this._updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/sql-endpoint/README.md b/src/sql-endpoint/README.md
index 8468c59a8..3c0476442 100644
--- a/src/sql-endpoint/README.md
+++ b/src/sql-endpoint/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_endpoint`
-Refer to the Terraform Registory for docs: [`databricks_sql_endpoint`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_endpoint).
+Refer to the Terraform Registry for docs: [`databricks_sql_endpoint`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_endpoint).
diff --git a/src/sql-endpoint/index.ts b/src/sql-endpoint/index.ts
index f62330819..000cd46c6 100644
--- a/src/sql-endpoint/index.ts
+++ b/src/sql-endpoint/index.ts
@@ -117,6 +117,25 @@ export function sqlEndpointChannelToTerraform(struct?: SqlEndpointChannelOutputR
}
}
+
+export function sqlEndpointChannelToHclTerraform(struct?: SqlEndpointChannelOutputReference | SqlEndpointChannel): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlEndpointChannelOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -202,6 +221,49 @@ export function sqlEndpointOdbcParamsToTerraform(struct?: SqlEndpointOdbcParamsO
}
}
+
+export function sqlEndpointOdbcParamsToHclTerraform(struct?: SqlEndpointOdbcParamsOutputReference | SqlEndpointOdbcParams): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ host: {
+ value: cdktf.stringToHclTerraform(struct!.host),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ hostname: {
+ value: cdktf.stringToHclTerraform(struct!.hostname),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(struct!.path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ port: {
+ value: cdktf.numberToHclTerraform(struct!.port),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ protocol: {
+ value: cdktf.stringToHclTerraform(struct!.protocol),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlEndpointOdbcParamsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -351,6 +413,31 @@ export function sqlEndpointTagsCustomTagsToTerraform(struct?: SqlEndpointTagsCus
}
}
+
+export function sqlEndpointTagsCustomTagsToHclTerraform(struct?: SqlEndpointTagsCustomTags | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ key: {
+ value: cdktf.stringToHclTerraform(struct!.key),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlEndpointTagsCustomTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -466,6 +553,25 @@ export function sqlEndpointTagsToTerraform(struct?: SqlEndpointTagsOutputReferen
}
}
+
+export function sqlEndpointTagsToHclTerraform(struct?: SqlEndpointTagsOutputReference | SqlEndpointTags): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ custom_tags: {
+ value: cdktf.listMapperHcl(sqlEndpointTagsCustomTagsToHclTerraform, true)(struct!.customTags),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlEndpointTagsCustomTagsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlEndpointTagsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -528,6 +634,25 @@ export function sqlEndpointTimeoutsToTerraform(struct?: SqlEndpointTimeouts | cd
}
}
+
+export function sqlEndpointTimeoutsToHclTerraform(struct?: SqlEndpointTimeouts | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ create: {
+ value: cdktf.stringToHclTerraform(struct!.create),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlEndpointTimeoutsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -988,4 +1113,126 @@ export class SqlEndpoint extends cdktf.TerraformResource {
timeouts: sqlEndpointTimeoutsToTerraform(this._timeouts.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ auto_stop_mins: {
+ value: cdktf.numberToHclTerraform(this._autoStopMins),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ cluster_size: {
+ value: cdktf.stringToHclTerraform(this._clusterSize),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_source_id: {
+ value: cdktf.stringToHclTerraform(this._dataSourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ enable_photon: {
+ value: cdktf.booleanToHclTerraform(this._enablePhoton),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ enable_serverless_compute: {
+ value: cdktf.booleanToHclTerraform(this._enableServerlessCompute),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ jdbc_url: {
+ value: cdktf.stringToHclTerraform(this._jdbcUrl),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ max_num_clusters: {
+ value: cdktf.numberToHclTerraform(this._maxNumClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ min_num_clusters: {
+ value: cdktf.numberToHclTerraform(this._minNumClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ num_clusters: {
+ value: cdktf.numberToHclTerraform(this._numClusters),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ spot_instance_policy: {
+ value: cdktf.stringToHclTerraform(this._spotInstancePolicy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(this._state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_type: {
+ value: cdktf.stringToHclTerraform(this._warehouseType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ channel: {
+ value: sqlEndpointChannelToHclTerraform(this._channel.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlEndpointChannelList",
+ },
+ odbc_params: {
+ value: sqlEndpointOdbcParamsToHclTerraform(this._odbcParams.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlEndpointOdbcParamsList",
+ },
+ tags: {
+ value: sqlEndpointTagsToHclTerraform(this._tags.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlEndpointTagsList",
+ },
+ timeouts: {
+ value: sqlEndpointTimeoutsToHclTerraform(this._timeouts.internalValue),
+ isBlock: true,
+ type: "struct",
+ storageClassType: "SqlEndpointTimeouts",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/sql-global-config/README.md b/src/sql-global-config/README.md
index eab358f72..16c19cf44 100644
--- a/src/sql-global-config/README.md
+++ b/src/sql-global-config/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_global_config`
-Refer to the Terraform Registory for docs: [`databricks_sql_global_config`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_global_config).
+Refer to the Terraform Registry for docs: [`databricks_sql_global_config`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_global_config).
diff --git a/src/sql-global-config/index.ts b/src/sql-global-config/index.ts
index 1ede975c8..97c09b171 100644
--- a/src/sql-global-config/index.ts
+++ b/src/sql-global-config/index.ts
@@ -236,4 +236,54 @@ export class SqlGlobalConfig extends cdktf.TerraformResource {
sql_config_params: cdktf.hashMapper(cdktf.stringToTerraform)(this._sqlConfigParams),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ data_access_config: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._dataAccessConfig),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ enable_serverless_compute: {
+ value: cdktf.booleanToHclTerraform(this._enableServerlessCompute),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ google_service_account: {
+ value: cdktf.stringToHclTerraform(this._googleServiceAccount),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_arn: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ security_policy: {
+ value: cdktf.stringToHclTerraform(this._securityPolicy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ sql_config_params: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._sqlConfigParams),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+ }
}
diff --git a/src/sql-permissions/README.md b/src/sql-permissions/README.md
index 058a6f265..b244e2c5e 100644
--- a/src/sql-permissions/README.md
+++ b/src/sql-permissions/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_permissions`
-Refer to the Terraform Registory for docs: [`databricks_sql_permissions`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_permissions).
+Refer to the Terraform Registry for docs: [`databricks_sql_permissions`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_permissions).
diff --git a/src/sql-permissions/index.ts b/src/sql-permissions/index.ts
index 0321ac317..2ff0cf0ec 100644
--- a/src/sql-permissions/index.ts
+++ b/src/sql-permissions/index.ts
@@ -76,6 +76,31 @@ export function sqlPermissionsPrivilegeAssignmentsToTerraform(struct?: SqlPermis
}
}
+
+export function sqlPermissionsPrivilegeAssignmentsToHclTerraform(struct?: SqlPermissionsPrivilegeAssignments | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ principal: {
+ value: cdktf.stringToHclTerraform(struct!.principal),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ privileges: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.privileges),
+ isBlock: false,
+ type: "set",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlPermissionsPrivilegeAssignmentsOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -400,4 +425,66 @@ export class SqlPermissions extends cdktf.TerraformResource {
privilege_assignments: cdktf.listMapper(sqlPermissionsPrivilegeAssignmentsToTerraform, true)(this._privilegeAssignments.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ anonymous_function: {
+ value: cdktf.booleanToHclTerraform(this._anonymousFunction),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ any_file: {
+ value: cdktf.booleanToHclTerraform(this._anyFile),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ catalog: {
+ value: cdktf.booleanToHclTerraform(this._catalog),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ database: {
+ value: cdktf.stringToHclTerraform(this._database),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ table: {
+ value: cdktf.stringToHclTerraform(this._table),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ view: {
+ value: cdktf.stringToHclTerraform(this._view),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ privilege_assignments: {
+ value: cdktf.listMapperHcl(sqlPermissionsPrivilegeAssignmentsToHclTerraform, true)(this._privilegeAssignments.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "SqlPermissionsPrivilegeAssignmentsList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/sql-query/README.md b/src/sql-query/README.md
index a9591c287..ce22991b5 100644
--- a/src/sql-query/README.md
+++ b/src/sql-query/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_query`
-Refer to the Terraform Registory for docs: [`databricks_sql_query`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_query).
+Refer to the Terraform Registry for docs: [`databricks_sql_query`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_query).
diff --git a/src/sql-query/index.ts b/src/sql-query/index.ts
index 6274f255e..50a861fbe 100644
--- a/src/sql-query/index.ts
+++ b/src/sql-query/index.ts
@@ -85,6 +85,25 @@ export function sqlQueryParameterDateToTerraform(struct?: SqlQueryParameterDateO
}
}
+
+export function sqlQueryParameterDateToHclTerraform(struct?: SqlQueryParameterDateOutputReference | SqlQueryParameterDate): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDateOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -152,6 +171,31 @@ export function sqlQueryParameterDateRangeRangeToTerraform(struct?: SqlQueryPara
}
}
+
+export function sqlQueryParameterDateRangeRangeToHclTerraform(struct?: SqlQueryParameterDateRangeRangeOutputReference | SqlQueryParameterDateRangeRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ end: {
+ value: cdktf.stringToHclTerraform(struct!.end),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start: {
+ value: cdktf.stringToHclTerraform(struct!.start),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDateRangeRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -240,6 +284,31 @@ export function sqlQueryParameterDateRangeToTerraform(struct?: SqlQueryParameter
}
}
+
+export function sqlQueryParameterDateRangeToHclTerraform(struct?: SqlQueryParameterDateRangeOutputReference | SqlQueryParameterDateRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ range: {
+ value: sqlQueryParameterDateRangeRangeToHclTerraform(struct!.range),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDateRangeRangeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDateRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -327,6 +396,25 @@ export function sqlQueryParameterDatetimeToTerraform(struct?: SqlQueryParameterD
}
}
+
+export function sqlQueryParameterDatetimeToHclTerraform(struct?: SqlQueryParameterDatetimeOutputReference | SqlQueryParameterDatetime): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -394,6 +482,31 @@ export function sqlQueryParameterDatetimeRangeRangeToTerraform(struct?: SqlQuery
}
}
+
+export function sqlQueryParameterDatetimeRangeRangeToHclTerraform(struct?: SqlQueryParameterDatetimeRangeRangeOutputReference | SqlQueryParameterDatetimeRangeRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ end: {
+ value: cdktf.stringToHclTerraform(struct!.end),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start: {
+ value: cdktf.stringToHclTerraform(struct!.start),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimeRangeRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -482,6 +595,31 @@ export function sqlQueryParameterDatetimeRangeToTerraform(struct?: SqlQueryParam
}
}
+
+export function sqlQueryParameterDatetimeRangeToHclTerraform(struct?: SqlQueryParameterDatetimeRangeOutputReference | SqlQueryParameterDatetimeRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ range: {
+ value: sqlQueryParameterDatetimeRangeRangeToHclTerraform(struct!.range),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimeRangeRangeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimeRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -569,6 +707,25 @@ export function sqlQueryParameterDatetimesecToTerraform(struct?: SqlQueryParamet
}
}
+
+export function sqlQueryParameterDatetimesecToHclTerraform(struct?: SqlQueryParameterDatetimesecOutputReference | SqlQueryParameterDatetimesec): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimesecOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -636,6 +793,31 @@ export function sqlQueryParameterDatetimesecRangeRangeToTerraform(struct?: SqlQu
}
}
+
+export function sqlQueryParameterDatetimesecRangeRangeToHclTerraform(struct?: SqlQueryParameterDatetimesecRangeRangeOutputReference | SqlQueryParameterDatetimesecRangeRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ end: {
+ value: cdktf.stringToHclTerraform(struct!.end),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ start: {
+ value: cdktf.stringToHclTerraform(struct!.start),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimesecRangeRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -724,6 +906,31 @@ export function sqlQueryParameterDatetimesecRangeToTerraform(struct?: SqlQueryPa
}
}
+
+export function sqlQueryParameterDatetimesecRangeToHclTerraform(struct?: SqlQueryParameterDatetimesecRangeOutputReference | SqlQueryParameterDatetimesecRange): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ range: {
+ value: sqlQueryParameterDatetimesecRangeRangeToHclTerraform(struct!.range),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimesecRangeRangeList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterDatetimesecRangeOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -821,6 +1028,37 @@ export function sqlQueryParameterEnumMultipleToTerraform(struct?: SqlQueryParame
}
}
+
+export function sqlQueryParameterEnumMultipleToHclTerraform(struct?: SqlQueryParameterEnumMultipleOutputReference | SqlQueryParameterEnumMultiple): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ prefix: {
+ value: cdktf.stringToHclTerraform(struct!.prefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ separator: {
+ value: cdktf.stringToHclTerraform(struct!.separator),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ suffix: {
+ value: cdktf.stringToHclTerraform(struct!.suffix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterEnumMultipleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -944,6 +1182,43 @@ export function sqlQueryParameterEnumToTerraform(struct?: SqlQueryParameterEnumO
}
}
+
+export function sqlQueryParameterEnumToHclTerraform(struct?: SqlQueryParameterEnumOutputReference | SqlQueryParameterEnum): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ options: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.options),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ values: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.values),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ multiple: {
+ value: sqlQueryParameterEnumMultipleToHclTerraform(struct!.multiple),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterEnumMultipleList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterEnumOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1072,6 +1347,25 @@ export function sqlQueryParameterNumberToTerraform(struct?: SqlQueryParameterNum
}
}
+
+export function sqlQueryParameterNumberToHclTerraform(struct?: SqlQueryParameterNumberOutputReference | SqlQueryParameterNumber): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.numberToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterNumberOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1144,6 +1438,37 @@ export function sqlQueryParameterQueryMultipleToTerraform(struct?: SqlQueryParam
}
}
+
+export function sqlQueryParameterQueryMultipleToHclTerraform(struct?: SqlQueryParameterQueryMultipleOutputReference | SqlQueryParameterQueryMultiple): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ prefix: {
+ value: cdktf.stringToHclTerraform(struct!.prefix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ separator: {
+ value: cdktf.stringToHclTerraform(struct!.separator),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ suffix: {
+ value: cdktf.stringToHclTerraform(struct!.suffix),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterQueryMultipleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1267,6 +1592,43 @@ export function sqlQueryParameterQueryToTerraform(struct?: SqlQueryParameterQuer
}
}
+
+export function sqlQueryParameterQueryToHclTerraform(struct?: SqlQueryParameterQueryOutputReference | SqlQueryParameterQuery): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ query_id: {
+ value: cdktf.stringToHclTerraform(struct!.queryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ values: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.values),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ multiple: {
+ value: sqlQueryParameterQueryMultipleToHclTerraform(struct!.multiple),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterQueryMultipleList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterQueryOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1395,6 +1757,25 @@ export function sqlQueryParameterTextToTerraform(struct?: SqlQueryParameterTextO
}
}
+
+export function sqlQueryParameterTextToHclTerraform(struct?: SqlQueryParameterTextOutputReference | SqlQueryParameterText): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterTextOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1532,6 +1913,91 @@ export function sqlQueryParameterToTerraform(struct?: SqlQueryParameter | cdktf.
}
}
+
+export function sqlQueryParameterToHclTerraform(struct?: SqlQueryParameter | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ title: {
+ value: cdktf.stringToHclTerraform(struct!.title),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ date: {
+ value: sqlQueryParameterDateToHclTerraform(struct!.date),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDateList",
+ },
+ date_range: {
+ value: sqlQueryParameterDateRangeToHclTerraform(struct!.dateRange),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDateRangeList",
+ },
+ datetime: {
+ value: sqlQueryParameterDatetimeToHclTerraform(struct!.datetime),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimeList",
+ },
+ datetime_range: {
+ value: sqlQueryParameterDatetimeRangeToHclTerraform(struct!.datetimeRange),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimeRangeList",
+ },
+ datetimesec: {
+ value: sqlQueryParameterDatetimesecToHclTerraform(struct!.datetimesec),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimesecList",
+ },
+ datetimesec_range: {
+ value: sqlQueryParameterDatetimesecRangeToHclTerraform(struct!.datetimesecRange),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterDatetimesecRangeList",
+ },
+ enum: {
+ value: sqlQueryParameterEnumToHclTerraform(struct!.enum),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterEnumList",
+ },
+ number: {
+ value: sqlQueryParameterNumberToHclTerraform(struct!.number),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterNumberList",
+ },
+ query: {
+ value: sqlQueryParameterQueryToHclTerraform(struct!.query),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterQueryList",
+ },
+ text: {
+ value: sqlQueryParameterTextToHclTerraform(struct!.text),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterTextList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryParameterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -1873,6 +2339,31 @@ export function sqlQueryScheduleContinuousToTerraform(struct?: SqlQueryScheduleC
}
}
+
+export function sqlQueryScheduleContinuousToHclTerraform(struct?: SqlQueryScheduleContinuousOutputReference | SqlQueryScheduleContinuous): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ interval_seconds: {
+ value: cdktf.numberToHclTerraform(struct!.intervalSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ until_date: {
+ value: cdktf.stringToHclTerraform(struct!.untilDate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryScheduleContinuousOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -1967,6 +2458,37 @@ export function sqlQueryScheduleDailyToTerraform(struct?: SqlQueryScheduleDailyO
}
}
+
+export function sqlQueryScheduleDailyToHclTerraform(struct?: SqlQueryScheduleDailyOutputReference | SqlQueryScheduleDaily): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ interval_days: {
+ value: cdktf.numberToHclTerraform(struct!.intervalDays),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ time_of_day: {
+ value: cdktf.stringToHclTerraform(struct!.timeOfDay),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ until_date: {
+ value: cdktf.stringToHclTerraform(struct!.untilDate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryScheduleDailyOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2085,6 +2607,43 @@ export function sqlQueryScheduleWeeklyToTerraform(struct?: SqlQueryScheduleWeekl
}
}
+
+export function sqlQueryScheduleWeeklyToHclTerraform(struct?: SqlQueryScheduleWeeklyOutputReference | SqlQueryScheduleWeekly): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ day_of_week: {
+ value: cdktf.stringToHclTerraform(struct!.dayOfWeek),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ interval_weeks: {
+ value: cdktf.numberToHclTerraform(struct!.intervalWeeks),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ time_of_day: {
+ value: cdktf.stringToHclTerraform(struct!.timeOfDay),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ until_date: {
+ value: cdktf.stringToHclTerraform(struct!.untilDate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryScheduleWeeklyOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2223,6 +2782,37 @@ export function sqlQueryScheduleToTerraform(struct?: SqlQueryScheduleOutputRefer
}
}
+
+export function sqlQueryScheduleToHclTerraform(struct?: SqlQueryScheduleOutputReference | SqlQuerySchedule): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ continuous: {
+ value: sqlQueryScheduleContinuousToHclTerraform(struct!.continuous),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryScheduleContinuousList",
+ },
+ daily: {
+ value: sqlQueryScheduleDailyToHclTerraform(struct!.daily),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryScheduleDailyList",
+ },
+ weekly: {
+ value: sqlQueryScheduleWeeklyToHclTerraform(struct!.weekly),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryScheduleWeeklyList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlQueryScheduleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -2588,4 +3178,84 @@ export class SqlQuery extends cdktf.TerraformResource {
schedule: sqlQueryScheduleToTerraform(this._schedule.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ created_at: {
+ value: cdktf.stringToHclTerraform(this._createdAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_source_id: {
+ value: cdktf.stringToHclTerraform(this._dataSourceId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parent: {
+ value: cdktf.stringToHclTerraform(this._parent),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ query: {
+ value: cdktf.stringToHclTerraform(this._query),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ run_as_role: {
+ value: cdktf.stringToHclTerraform(this._runAsRole),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ tags: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._tags),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ updated_at: {
+ value: cdktf.stringToHclTerraform(this._updatedAt),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameter: {
+ value: cdktf.listMapperHcl(sqlQueryParameterToHclTerraform, true)(this._parameter.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryParameterList",
+ },
+ schedule: {
+ value: sqlQueryScheduleToHclTerraform(this._schedule.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlQueryScheduleList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/sql-table/README.md b/src/sql-table/README.md
index b335ff5d3..d16c52477 100644
--- a/src/sql-table/README.md
+++ b/src/sql-table/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_table`
-Refer to the Terraform Registory for docs: [`databricks_sql_table`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_table).
+Refer to the Terraform Registry for docs: [`databricks_sql_table`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_table).
diff --git a/src/sql-table/index.ts b/src/sql-table/index.ts
index 4868fae9d..7175f1955 100644
--- a/src/sql-table/index.ts
+++ b/src/sql-table/index.ts
@@ -118,6 +118,43 @@ export function sqlTableColumnToTerraform(struct?: SqlTableColumn | cdktf.IResol
}
}
+
+export function sqlTableColumnToHclTerraform(struct?: SqlTableColumn | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(struct!.comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ nullable: {
+ value: cdktf.booleanToHclTerraform(struct!.nullable),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ type: {
+ value: cdktf.stringToHclTerraform(struct!.type),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlTableColumnOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -621,4 +658,114 @@ export class SqlTable extends cdktf.TerraformResource {
column: cdktf.listMapper(sqlTableColumnToTerraform, true)(this._column.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_id: {
+ value: cdktf.stringToHclTerraform(this._clusterId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ cluster_keys: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._clusterKeys),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_source_format: {
+ value: cdktf.stringToHclTerraform(this._dataSourceFormat),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ options: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._options),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ partitions: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(this._partitions),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ properties: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._properties),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_credential_name: {
+ value: cdktf.stringToHclTerraform(this._storageCredentialName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_location: {
+ value: cdktf.stringToHclTerraform(this._storageLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ table_type: {
+ value: cdktf.stringToHclTerraform(this._tableType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ view_definition: {
+ value: cdktf.stringToHclTerraform(this._viewDefinition),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ warehouse_id: {
+ value: cdktf.stringToHclTerraform(this._warehouseId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ column: {
+ value: cdktf.listMapperHcl(sqlTableColumnToHclTerraform, true)(this._column.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlTableColumnList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/sql-visualization/README.md b/src/sql-visualization/README.md
index 753757887..4e27c41cb 100644
--- a/src/sql-visualization/README.md
+++ b/src/sql-visualization/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_visualization`
-Refer to the Terraform Registory for docs: [`databricks_sql_visualization`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_visualization).
+Refer to the Terraform Registry for docs: [`databricks_sql_visualization`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_visualization).
diff --git a/src/sql-visualization/index.ts b/src/sql-visualization/index.ts
index 256a31437..b3b19c39e 100644
--- a/src/sql-visualization/index.ts
+++ b/src/sql-visualization/index.ts
@@ -246,4 +246,60 @@ export class SqlVisualization extends cdktf.TerraformResource {
visualization_id: cdktf.stringToTerraform(this._visualizationId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ options: {
+ value: cdktf.stringToHclTerraform(this._options),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ query_id: {
+ value: cdktf.stringToHclTerraform(this._queryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ query_plan: {
+ value: cdktf.stringToHclTerraform(this._queryPlan),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ type: {
+ value: cdktf.stringToHclTerraform(this._type),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ visualization_id: {
+ value: cdktf.stringToHclTerraform(this._visualizationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/sql-widget/README.md b/src/sql-widget/README.md
index bd6d6c2eb..6de864df6 100644
--- a/src/sql-widget/README.md
+++ b/src/sql-widget/README.md
@@ -1,3 +1,3 @@
# `databricks_sql_widget`
-Refer to the Terraform Registory for docs: [`databricks_sql_widget`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_widget).
+Refer to the Terraform Registry for docs: [`databricks_sql_widget`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/sql_widget).
diff --git a/src/sql-widget/index.ts b/src/sql-widget/index.ts
index c6f920716..692b2a4ca 100644
--- a/src/sql-widget/index.ts
+++ b/src/sql-widget/index.ts
@@ -98,6 +98,55 @@ export function sqlWidgetParameterToTerraform(struct?: SqlWidgetParameter | cdkt
}
}
+
+export function sqlWidgetParameterToHclTerraform(struct?: SqlWidgetParameter | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ map_to: {
+ value: cdktf.stringToHclTerraform(struct!.mapTo),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ title: {
+ value: cdktf.stringToHclTerraform(struct!.title),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ type: {
+ value: cdktf.stringToHclTerraform(struct!.type),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ value: {
+ value: cdktf.stringToHclTerraform(struct!.value),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ values: {
+ value: cdktf.listMapperHcl(cdktf.stringToHclTerraform, false)(struct!.values),
+ isBlock: false,
+ type: "list",
+ storageClassType: "stringList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlWidgetParameterOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -319,6 +368,49 @@ export function sqlWidgetPositionToTerraform(struct?: SqlWidgetPositionOutputRef
}
}
+
+export function sqlWidgetPositionToHclTerraform(struct?: SqlWidgetPositionOutputReference | SqlWidgetPosition): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ auto_height: {
+ value: cdktf.booleanToHclTerraform(struct!.autoHeight),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ pos_x: {
+ value: cdktf.numberToHclTerraform(struct!.posX),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ pos_y: {
+ value: cdktf.numberToHclTerraform(struct!.posY),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ size_x: {
+ value: cdktf.numberToHclTerraform(struct!.sizeX),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ size_y: {
+ value: cdktf.numberToHclTerraform(struct!.sizeY),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class SqlWidgetPositionOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -674,4 +766,66 @@ export class SqlWidget extends cdktf.TerraformResource {
position: sqlWidgetPositionToTerraform(this._position.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ dashboard_id: {
+ value: cdktf.stringToHclTerraform(this._dashboardId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ description: {
+ value: cdktf.stringToHclTerraform(this._description),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ text: {
+ value: cdktf.stringToHclTerraform(this._text),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ title: {
+ value: cdktf.stringToHclTerraform(this._title),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ visualization_id: {
+ value: cdktf.stringToHclTerraform(this._visualizationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ widget_id: {
+ value: cdktf.stringToHclTerraform(this._widgetId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ parameter: {
+ value: cdktf.listMapperHcl(sqlWidgetParameterToHclTerraform, true)(this._parameter.internalValue),
+ isBlock: true,
+ type: "set",
+ storageClassType: "SqlWidgetParameterList",
+ },
+ position: {
+ value: sqlWidgetPositionToHclTerraform(this._position.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "SqlWidgetPositionList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/storage-credential/README.md b/src/storage-credential/README.md
index 01390d941..d6f254217 100644
--- a/src/storage-credential/README.md
+++ b/src/storage-credential/README.md
@@ -1,3 +1,3 @@
# `databricks_storage_credential`
-Refer to the Terraform Registory for docs: [`databricks_storage_credential`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/storage_credential).
+Refer to the Terraform Registry for docs: [`databricks_storage_credential`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/storage_credential).
diff --git a/src/storage-credential/index.ts b/src/storage-credential/index.ts
index 9aa3bfcf6..30bf1fe81 100644
--- a/src/storage-credential/index.ts
+++ b/src/storage-credential/index.ts
@@ -105,6 +105,37 @@ export function storageCredentialAwsIamRoleToTerraform(struct?: StorageCredentia
}
}
+
+export function storageCredentialAwsIamRoleToHclTerraform(struct?: StorageCredentialAwsIamRoleOutputReference | StorageCredentialAwsIamRole): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ external_id: {
+ value: cdktf.stringToHclTerraform(struct!.externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role_arn: {
+ value: cdktf.stringToHclTerraform(struct!.roleArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ unity_catalog_iam_arn: {
+ value: cdktf.stringToHclTerraform(struct!.unityCatalogIamArn),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class StorageCredentialAwsIamRoleOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -221,6 +252,37 @@ export function storageCredentialAzureManagedIdentityToTerraform(struct?: Storag
}
}
+
+export function storageCredentialAzureManagedIdentityToHclTerraform(struct?: StorageCredentialAzureManagedIdentityOutputReference | StorageCredentialAzureManagedIdentity): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ access_connector_id: {
+ value: cdktf.stringToHclTerraform(struct!.accessConnectorId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ credential_id: {
+ value: cdktf.stringToHclTerraform(struct!.credentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ managed_identity_id: {
+ value: cdktf.stringToHclTerraform(struct!.managedIdentityId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class StorageCredentialAzureManagedIdentityOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -337,6 +399,37 @@ export function storageCredentialAzureServicePrincipalToTerraform(struct?: Stora
}
}
+
+export function storageCredentialAzureServicePrincipalToHclTerraform(struct?: StorageCredentialAzureServicePrincipalOutputReference | StorageCredentialAzureServicePrincipal): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ application_id: {
+ value: cdktf.stringToHclTerraform(struct!.applicationId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ client_secret: {
+ value: cdktf.stringToHclTerraform(struct!.clientSecret),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ directory_id: {
+ value: cdktf.stringToHclTerraform(struct!.directoryId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class StorageCredentialAzureServicePrincipalOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -442,6 +535,31 @@ export function storageCredentialDatabricksGcpServiceAccountToTerraform(struct?:
}
}
+
+export function storageCredentialDatabricksGcpServiceAccountToHclTerraform(struct?: StorageCredentialDatabricksGcpServiceAccountOutputReference | StorageCredentialDatabricksGcpServiceAccount): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ credential_id: {
+ value: cdktf.stringToHclTerraform(struct!.credentialId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ email: {
+ value: cdktf.stringToHclTerraform(struct!.email),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class StorageCredentialDatabricksGcpServiceAccountOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -539,6 +657,37 @@ export function storageCredentialGcpServiceAccountKeyToTerraform(struct?: Storag
}
}
+
+export function storageCredentialGcpServiceAccountKeyToHclTerraform(struct?: StorageCredentialGcpServiceAccountKeyOutputReference | StorageCredentialGcpServiceAccountKey): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ email: {
+ value: cdktf.stringToHclTerraform(struct!.email),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_key: {
+ value: cdktf.stringToHclTerraform(struct!.privateKey),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ private_key_id: {
+ value: cdktf.stringToHclTerraform(struct!.privateKeyId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class StorageCredentialGcpServiceAccountKeyOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
@@ -919,4 +1068,90 @@ export class StorageCredential extends cdktf.TerraformResource {
gcp_service_account_key: storageCredentialGcpServiceAccountKeyToTerraform(this._gcpServiceAccountKey.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force_destroy: {
+ value: cdktf.booleanToHclTerraform(this._forceDestroy),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_update: {
+ value: cdktf.booleanToHclTerraform(this._forceUpdate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ metastore_id: {
+ value: cdktf.stringToHclTerraform(this._metastoreId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ read_only: {
+ value: cdktf.booleanToHclTerraform(this._readOnly),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ aws_iam_role: {
+ value: storageCredentialAwsIamRoleToHclTerraform(this._awsIamRole.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "StorageCredentialAwsIamRoleList",
+ },
+ azure_managed_identity: {
+ value: storageCredentialAzureManagedIdentityToHclTerraform(this._azureManagedIdentity.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "StorageCredentialAzureManagedIdentityList",
+ },
+ azure_service_principal: {
+ value: storageCredentialAzureServicePrincipalToHclTerraform(this._azureServicePrincipal.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "StorageCredentialAzureServicePrincipalList",
+ },
+ databricks_gcp_service_account: {
+ value: storageCredentialDatabricksGcpServiceAccountToHclTerraform(this._databricksGcpServiceAccount.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "StorageCredentialDatabricksGcpServiceAccountList",
+ },
+ gcp_service_account_key: {
+ value: storageCredentialGcpServiceAccountKeyToHclTerraform(this._gcpServiceAccountKey.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "StorageCredentialGcpServiceAccountKeyList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/system-schema/README.md b/src/system-schema/README.md
index 1291d9191..ae6e69bde 100644
--- a/src/system-schema/README.md
+++ b/src/system-schema/README.md
@@ -1,3 +1,3 @@
# `databricks_system_schema`
-Refer to the Terraform Registory for docs: [`databricks_system_schema`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/system_schema).
+Refer to the Terraform Registry for docs: [`databricks_system_schema`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/system_schema).
diff --git a/src/system-schema/index.ts b/src/system-schema/index.ts
index a9d31b551..d1062d5d7 100644
--- a/src/system-schema/index.ts
+++ b/src/system-schema/index.ts
@@ -153,4 +153,30 @@ export class SystemSchema extends cdktf.TerraformResource {
state: cdktf.stringToTerraform(this._state),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema: {
+ value: cdktf.stringToHclTerraform(this._schema),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ state: {
+ value: cdktf.stringToHclTerraform(this._state),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/table/README.md b/src/table/README.md
index bf46aa14e..53057e8a5 100644
--- a/src/table/README.md
+++ b/src/table/README.md
@@ -1,3 +1,3 @@
# `databricks_table`
-Refer to the Terraform Registory for docs: [`databricks_table`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/table).
+Refer to the Terraform Registry for docs: [`databricks_table`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/table).
diff --git a/src/table/index.ts b/src/table/index.ts
index aa80a2b9c..e64e3948e 100644
--- a/src/table/index.ts
+++ b/src/table/index.ts
@@ -137,6 +137,85 @@ export function tableColumnToTerraform(struct?: TableColumn | cdktf.IResolvable)
}
}
+
+export function tableColumnToHclTerraform(struct?: TableColumn | cdktf.IResolvable): any {
+ if (!cdktf.canInspect(struct) || cdktf.Tokenization.isResolvable(struct)) { return struct; }
+ if (cdktf.isComplexElement(struct)) {
+ throw new Error("A complex element was used as configuration, this is not supported: https://cdk.tf/complex-object-as-configuration");
+ }
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(struct!.comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(struct!.name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ nullable: {
+ value: cdktf.booleanToHclTerraform(struct!.nullable),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ partition_index: {
+ value: cdktf.numberToHclTerraform(struct!.partitionIndex),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ position: {
+ value: cdktf.numberToHclTerraform(struct!.position),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ type_interval_type: {
+ value: cdktf.stringToHclTerraform(struct!.typeIntervalType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ type_json: {
+ value: cdktf.stringToHclTerraform(struct!.typeJson),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ type_name: {
+ value: cdktf.stringToHclTerraform(struct!.typeName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ type_precision: {
+ value: cdktf.numberToHclTerraform(struct!.typePrecision),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ type_scale: {
+ value: cdktf.numberToHclTerraform(struct!.typeScale),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ type_text: {
+ value: cdktf.stringToHclTerraform(struct!.typeText),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined));
+}
+
export class TableColumnOutputReference extends cdktf.ComplexObject {
private isEmptyObject = false;
private resolvableValue?: cdktf.IResolvable;
@@ -707,4 +786,90 @@ export class Table extends cdktf.TerraformResource {
column: cdktf.listMapper(tableColumnToTerraform, true)(this._column.internalValue),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ data_source_format: {
+ value: cdktf.stringToHclTerraform(this._dataSourceFormat),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ properties: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._properties),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_credential_name: {
+ value: cdktf.stringToHclTerraform(this._storageCredentialName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_location: {
+ value: cdktf.stringToHclTerraform(this._storageLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ table_type: {
+ value: cdktf.stringToHclTerraform(this._tableType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ view_definition: {
+ value: cdktf.stringToHclTerraform(this._viewDefinition),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ column: {
+ value: cdktf.listMapperHcl(tableColumnToHclTerraform, true)(this._column.internalValue),
+ isBlock: true,
+ type: "list",
+ storageClassType: "TableColumnList",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/token/README.md b/src/token/README.md
index 2afd28dfe..798e3c49d 100644
--- a/src/token/README.md
+++ b/src/token/README.md
@@ -1,3 +1,3 @@
# `databricks_token`
-Refer to the Terraform Registory for docs: [`databricks_token`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/token).
+Refer to the Terraform Registry for docs: [`databricks_token`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/token).
diff --git a/src/token/index.ts b/src/token/index.ts
index a79fae819..d5aeb281e 100644
--- a/src/token/index.ts
+++ b/src/token/index.ts
@@ -219,4 +219,48 @@ export class Token extends cdktf.TerraformResource {
token_id: cdktf.stringToTerraform(this._tokenId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ creation_time: {
+ value: cdktf.numberToHclTerraform(this._creationTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ expiry_time: {
+ value: cdktf.numberToHclTerraform(this._expiryTime),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ lifetime_seconds: {
+ value: cdktf.numberToHclTerraform(this._lifetimeSeconds),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ token_id: {
+ value: cdktf.stringToHclTerraform(this._tokenId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/user-instance-profile/README.md b/src/user-instance-profile/README.md
index 07ca9cd20..ef0552d9b 100644
--- a/src/user-instance-profile/README.md
+++ b/src/user-instance-profile/README.md
@@ -1,3 +1,3 @@
# `databricks_user_instance_profile`
-Refer to the Terraform Registory for docs: [`databricks_user_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user_instance_profile).
+Refer to the Terraform Registry for docs: [`databricks_user_instance_profile`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user_instance_profile).
diff --git a/src/user-instance-profile/index.ts b/src/user-instance-profile/index.ts
index 6b6fe1f62..a1e081653 100644
--- a/src/user-instance-profile/index.ts
+++ b/src/user-instance-profile/index.ts
@@ -142,4 +142,30 @@ export class UserInstanceProfile extends cdktf.TerraformResource {
user_id: cdktf.stringToTerraform(this._userId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ instance_profile_id: {
+ value: cdktf.stringToHclTerraform(this._instanceProfileId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(this._userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/user-role/README.md b/src/user-role/README.md
index a1accf73e..2ab0e380a 100644
--- a/src/user-role/README.md
+++ b/src/user-role/README.md
@@ -1,3 +1,3 @@
# `databricks_user_role`
-Refer to the Terraform Registory for docs: [`databricks_user_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user_role).
+Refer to the Terraform Registry for docs: [`databricks_user_role`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user_role).
diff --git a/src/user-role/index.ts b/src/user-role/index.ts
index 4af1afc65..993b6cfcc 100644
--- a/src/user-role/index.ts
+++ b/src/user-role/index.ts
@@ -142,4 +142,30 @@ export class UserRole extends cdktf.TerraformResource {
user_id: cdktf.stringToTerraform(this._userId),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ role: {
+ value: cdktf.stringToHclTerraform(this._role),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_id: {
+ value: cdktf.stringToHclTerraform(this._userId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/user/README.md b/src/user/README.md
index 922b9a88f..bf944ffee 100644
--- a/src/user/README.md
+++ b/src/user/README.md
@@ -1,3 +1,3 @@
# `databricks_user`
-Refer to the Terraform Registory for docs: [`databricks_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user).
+Refer to the Terraform Registry for docs: [`databricks_user`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/user).
diff --git a/src/user/index.ts b/src/user/index.ts
index 46a773858..90c8a8724 100644
--- a/src/user/index.ts
+++ b/src/user/index.ts
@@ -431,4 +431,108 @@ export class User extends cdktf.TerraformResource {
workspace_access: cdktf.booleanToTerraform(this._workspaceAccess),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ acl_principal_id: {
+ value: cdktf.stringToHclTerraform(this._aclPrincipalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ active: {
+ value: cdktf.booleanToHclTerraform(this._active),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_cluster_create: {
+ value: cdktf.booleanToHclTerraform(this._allowClusterCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ allow_instance_pool_create: {
+ value: cdktf.booleanToHclTerraform(this._allowInstancePoolCreate),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ databricks_sql_access: {
+ value: cdktf.booleanToHclTerraform(this._databricksSqlAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ disable_as_user_deletion: {
+ value: cdktf.booleanToHclTerraform(this._disableAsUserDeletion),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ display_name: {
+ value: cdktf.stringToHclTerraform(this._displayName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ external_id: {
+ value: cdktf.stringToHclTerraform(this._externalId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ force: {
+ value: cdktf.booleanToHclTerraform(this._force),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_delete_home_dir: {
+ value: cdktf.booleanToHclTerraform(this._forceDeleteHomeDir),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ force_delete_repos: {
+ value: cdktf.booleanToHclTerraform(this._forceDeleteRepos),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ home: {
+ value: cdktf.stringToHclTerraform(this._home),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ repos: {
+ value: cdktf.stringToHclTerraform(this._repos),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ user_name: {
+ value: cdktf.stringToHclTerraform(this._userName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ workspace_access: {
+ value: cdktf.booleanToHclTerraform(this._workspaceAccess),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "boolean",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/volume/README.md b/src/volume/README.md
index c1cfe8fbf..93b1d18da 100644
--- a/src/volume/README.md
+++ b/src/volume/README.md
@@ -1,3 +1,3 @@
# `databricks_volume`
-Refer to the Terraform Registory for docs: [`databricks_volume`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/volume).
+Refer to the Terraform Registry for docs: [`databricks_volume`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/volume).
diff --git a/src/volume/index.ts b/src/volume/index.ts
index 1704623b8..8a86568a8 100644
--- a/src/volume/index.ts
+++ b/src/volume/index.ts
@@ -246,4 +246,60 @@ export class Volume extends cdktf.TerraformResource {
volume_type: cdktf.stringToTerraform(this._volumeType),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ catalog_name: {
+ value: cdktf.stringToHclTerraform(this._catalogName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ comment: {
+ value: cdktf.stringToHclTerraform(this._comment),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ name: {
+ value: cdktf.stringToHclTerraform(this._name),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ owner: {
+ value: cdktf.stringToHclTerraform(this._owner),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ schema_name: {
+ value: cdktf.stringToHclTerraform(this._schemaName),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ storage_location: {
+ value: cdktf.stringToHclTerraform(this._storageLocation),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ volume_type: {
+ value: cdktf.stringToHclTerraform(this._volumeType),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/workspace-conf/README.md b/src/workspace-conf/README.md
index 928cf9cee..faea412c7 100644
--- a/src/workspace-conf/README.md
+++ b/src/workspace-conf/README.md
@@ -1,3 +1,3 @@
# `databricks_workspace_conf`
-Refer to the Terraform Registory for docs: [`databricks_workspace_conf`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/workspace_conf).
+Refer to the Terraform Registry for docs: [`databricks_workspace_conf`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/workspace_conf).
diff --git a/src/workspace-conf/index.ts b/src/workspace-conf/index.ts
index 6dcd4c59e..5e202686d 100644
--- a/src/workspace-conf/index.ts
+++ b/src/workspace-conf/index.ts
@@ -126,4 +126,24 @@ export class WorkspaceConf extends cdktf.TerraformResource {
id: cdktf.stringToTerraform(this._id),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ custom_config: {
+ value: cdktf.hashMapperHcl(cdktf.stringToHclTerraform)(this._customConfig),
+ isBlock: false,
+ type: "map",
+ storageClassType: "stringMap",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/src/workspace-file/README.md b/src/workspace-file/README.md
index cb4f8a0c5..5f73819b0 100644
--- a/src/workspace-file/README.md
+++ b/src/workspace-file/README.md
@@ -1,3 +1,3 @@
# `databricks_workspace_file`
-Refer to the Terraform Registory for docs: [`databricks_workspace_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/workspace_file).
+Refer to the Terraform Registry for docs: [`databricks_workspace_file`](https://registry.terraform.io/providers/databricks/databricks/1.33.0/docs/resources/workspace_file).
diff --git a/src/workspace-file/index.ts b/src/workspace-file/index.ts
index 4316c7355..9e9b3fdf1 100644
--- a/src/workspace-file/index.ts
+++ b/src/workspace-file/index.ts
@@ -221,4 +221,48 @@ export class WorkspaceFile extends cdktf.TerraformResource {
source: cdktf.stringToTerraform(this._source),
};
}
+
+ protected synthesizeHclAttributes(): { [name: string]: any } {
+ const attrs = {
+ content_base64: {
+ value: cdktf.stringToHclTerraform(this._contentBase64),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ id: {
+ value: cdktf.stringToHclTerraform(this._id),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ md5: {
+ value: cdktf.stringToHclTerraform(this._md5),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ object_id: {
+ value: cdktf.numberToHclTerraform(this._objectId),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "number",
+ },
+ path: {
+ value: cdktf.stringToHclTerraform(this._path),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ source: {
+ value: cdktf.stringToHclTerraform(this._source),
+ isBlock: false,
+ type: "simple",
+ storageClassType: "string",
+ },
+ };
+
+ // remove undefined attributes
+ return Object.fromEntries(Object.entries(attrs).filter(([_, value]) => value !== undefined && value.value !== undefined ))
+ }
}
diff --git a/yarn.lock b/yarn.lock
index c6243e908..2f57f37c6 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -18,7 +18,7 @@
tunnel "^0.0.6"
undici "^5.25.4"
-"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.22.13":
+"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.18.6":
version "7.23.5"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244"
integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==
@@ -26,22 +26,22 @@
"@babel/highlight" "^7.23.4"
chalk "^2.4.2"
-"@babel/generator@^7.21.4":
- version "7.23.6"
- resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e"
- integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw==
+"@babel/generator@7.21.4":
+ version "7.21.4"
+ resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.4.tgz#64a94b7448989f421f919d5239ef553b37bb26bc"
+ integrity sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA==
dependencies:
- "@babel/types" "^7.23.6"
+ "@babel/types" "^7.21.4"
"@jridgewell/gen-mapping" "^0.3.2"
"@jridgewell/trace-mapping" "^0.3.17"
jsesc "^2.5.1"
-"@babel/helper-string-parser@^7.23.4":
+"@babel/helper-string-parser@^7.19.4", "@babel/helper-string-parser@^7.23.4":
version "7.23.4"
resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83"
integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==
-"@babel/helper-validator-identifier@^7.22.20":
+"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.22.20":
version "7.22.20"
resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0"
integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==
@@ -55,21 +55,30 @@
chalk "^2.4.2"
js-tokens "^4.0.0"
-"@babel/parser@^7.22.15":
+"@babel/parser@^7.20.7":
version "7.23.6"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b"
integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==
-"@babel/template@^7.20.7":
- version "7.22.15"
- resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38"
- integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==
+"@babel/template@7.20.7":
+ version "7.20.7"
+ resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8"
+ integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==
+ dependencies:
+ "@babel/code-frame" "^7.18.6"
+ "@babel/parser" "^7.20.7"
+ "@babel/types" "^7.20.7"
+
+"@babel/types@7.21.4":
+ version "7.21.4"
+ resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.4.tgz#2d5d6bb7908699b3b416409ffd3b5daa25b030d4"
+ integrity sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA==
dependencies:
- "@babel/code-frame" "^7.22.13"
- "@babel/parser" "^7.22.15"
- "@babel/types" "^7.22.15"
+ "@babel/helper-string-parser" "^7.19.4"
+ "@babel/helper-validator-identifier" "^7.19.1"
+ to-fast-properties "^2.0.0"
-"@babel/types@^7.21.4", "@babel/types@^7.22.15", "@babel/types@^7.23.6":
+"@babel/types@^7.20.7", "@babel/types@^7.21.4":
version "7.23.6"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd"
integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==
@@ -78,105 +87,113 @@
"@babel/helper-validator-identifier" "^7.22.20"
to-fast-properties "^2.0.0"
-"@cdktf/cli-core@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/cli-core/-/cli-core-0.19.2.tgz#09664244498eb44b31e3f2a374a9e8d13e170feb"
- integrity sha512-kjgEUhrHx3kUPfL7KsTo6GrurVUPT77FmOUf7wWXt7ajNE5zCPvx/HKGmQruzt0n6eLZp1aKT+r/D6YRfXcIGA==
+"@cdktf/cli-core@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/cli-core/-/cli-core-0.20.0.tgz#df254e3d38c8dfd6bf966e166465abeb8367b931"
+ integrity sha512-qTPWHi7d2jHd8eaeFMOU4Ybc8GfyhIGS3Ktq4tUgccrmUnhGOJ8b/CIa2kWTPk27gokz5SWj3p866FNYKV0aGQ==
dependencies:
- "@cdktf/commons" "0.19.2"
- "@cdktf/hcl2cdk" "0.19.2"
- "@cdktf/hcl2json" "0.19.2"
+ "@cdktf/commons" "0.20.0"
+ "@cdktf/hcl-tools" "0.20.0"
+ "@cdktf/hcl2cdk" "0.20.0"
+ "@cdktf/hcl2json" "0.20.0"
"@cdktf/node-pty-prebuilt-multiarch" "0.10.1-pre.11"
- "@cdktf/provider-schema" "0.19.2"
- "@sentry/node" "^7.64.0"
- archiver "^5.3.1"
- cdktf "0.19.2"
- chalk "^4.1.2"
- chokidar "^3.5.3"
- cli-spinners "2.7.0"
- codemaker "^1.86.1"
- constructs "^10.0.25"
- cross-fetch "^3.1.5"
- cross-spawn "^7.0.3"
- detect-port "^1.5.1"
- execa "^5.1.1"
- extract-zip "^2.0.1"
- follow-redirects "^1.15.2"
- fs-extra "^8.1.0"
- https-proxy-agent "^5.0.1"
- indent-string "^4.0.0"
- ink "^3.2.0"
- ink-select-input "^4.2.1"
- ink-spinner "^4.0.3"
- ink-testing-library "^2.1.0"
- ink-use-stdout-dimensions "^1.0.5"
- jsii "^5.1.10"
- jsii-pacmak "^1.87.0"
- jsii-srcmak "^0.1.951"
- lodash.isequal "^4.5.0"
- log4js "^6.7.0"
- minimatch "^5.1.0"
- node-fetch "^2.6.7"
- open "^7.4.2"
- parse-gitignore "^1.0.1"
- pkg-up "^3.1.0"
- semver "^7.5.3"
- sscaff "^1.2.274"
- stream-buffers "^3.0.2"
- strip-ansi "^6.0.1"
- tunnel-agent "^0.6.0"
- uuid "^8.3.2"
- xml-js "^1.6.11"
- xstate "^4.34.0"
- yargs "^17.6"
- yoga-layout-prebuilt "^1.10.0"
- zod "^3.22.4"
-
-"@cdktf/commons@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/commons/-/commons-0.19.2.tgz#3971e5a797f506d8b0468b9d36ed4074a6f936d8"
- integrity sha512-5rOeb0cSREHQa5XVsGFEV6Ce8Zwo2WxE8GIhmGd/JzeSAByhK8scHFlD3+eENl83W/8lwIkm/nSl9oDHEkENIg==
- dependencies:
- "@sentry/node" "^7.77.0"
- cdktf "0.19.2"
- ci-info "^3.9.0"
- codemaker "^1.91.0"
- constructs "^10.0.25"
- cross-spawn "^7.0.3"
- follow-redirects "^1.15.3"
- fs-extra "^11.1.1"
- is-valid-domain "^0.1.6"
- log4js "^6.9.1"
- uuid "^9.0.1"
-
-"@cdktf/hcl2cdk@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/hcl2cdk/-/hcl2cdk-0.19.2.tgz#1b3555e920f37994444479eda4c0635e8d1cdbd8"
- integrity sha512-v0UNRvvzuCi3SnmSAgBFAnWavT0ybR1AzkK8ndgfbB5JLDoNm0iJV0MOTURZF+I0O3V9u4RZsw4DVNPdil2EEA==
- dependencies:
- "@babel/generator" "^7.21.4"
- "@babel/template" "^7.20.7"
- "@babel/types" "^7.21.4"
- "@cdktf/commons" "0.19.2"
- "@cdktf/hcl2json" "0.19.2"
- "@cdktf/provider-generator" "0.19.2"
- "@cdktf/provider-schema" "0.19.2"
- camelcase "^6.3.0"
- deep-equal "^2.2.0"
- glob "^10.3.3"
- graphology "^0.25.1"
- graphology-types "^0.24.7"
- jsii-rosetta "^5.1.9"
- prettier "^2.8.6"
- reserved-words "^0.1.2"
- zod "^3.22.4"
-
-"@cdktf/hcl2json@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/hcl2json/-/hcl2json-0.19.2.tgz#0065981462a8bc48bd3bbf4d64a6c1c4ecbc801b"
- integrity sha512-gFj36AshWSyPKq/eNjQtMnWj0QM0tPtMulFpQ0FrB+eWj0GvxgWg9d65gGCZ8Y/o33VV/2Kv5l8VlDEYDW2S7Q==
- dependencies:
- fs-extra "^11.1.1"
+ "@cdktf/provider-schema" "0.20.0"
+ "@sentry/node" "7.91.0"
+ archiver "5.3.2"
+ cdktf "0.20.0"
+ chalk "4.1.2"
+ chokidar "3.5.3"
+ cli-spinners "2.9.2"
+ codemaker "1.93.0"
+ constructs "10.1.167"
+ cross-fetch "3.1.8"
+ cross-spawn "7.0.3"
+ detect-port "1.5.1"
+ execa "5.1.1"
+ extract-zip "2.0.1"
+ follow-redirects "1.15.4"
+ fs-extra "8.1.0"
+ https-proxy-agent "5.0.1"
+ indent-string "4.0.0"
+ ink "3.2.0"
+ ink-select-input "4.2.2"
+ ink-spinner "4.0.3"
+ ink-testing-library "2.1.0"
+ ink-use-stdout-dimensions "1.0.5"
+ jsii "5.3.3"
+ jsii-pacmak "1.93.0"
+ jsii-srcmak "0.1.999"
+ lodash.isequal "4.5.0"
+ log4js "6.9.1"
+ minimatch "5.1.6"
+ node-fetch "2.7.0"
+ open "7.4.2"
+ parse-gitignore "1.0.1"
+ pkg-up "3.1.0"
+ semver "7.5.4"
+ sscaff "1.2.274"
+ stream-buffers "3.0.2"
+ strip-ansi "6.0.1"
+ tunnel-agent "0.6.0"
+ uuid "8.3.2"
+ xml-js "1.6.11"
+ xstate "4.38.3"
+ yargs "17.7.2"
+ yoga-layout-prebuilt "1.10.0"
+ zod "3.22.4"
+
+"@cdktf/commons@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/commons/-/commons-0.20.0.tgz#36c4b187b94b8632b5ca0d155cfc642b7db89f1c"
+ integrity sha512-LWggwICmeN6XYfbwuOnO8rXr5lVKS6SzQFELLCXZZepFkRCQ5puR0BDKzObjaN6Fkh9eQj+hmLfyW2xwZkP0XA==
+ dependencies:
+ "@sentry/node" "7.92.0"
+ cdktf "0.20.0"
+ ci-info "3.9.0"
+ codemaker "1.93.0"
+ cross-spawn "7.0.3"
+ follow-redirects "1.15.4"
+ fs-extra "11.2.0"
+ is-valid-domain "0.1.6"
+ log4js "6.9.1"
+ strip-ansi "6.0.1"
+ uuid "9.0.1"
+
+"@cdktf/hcl-tools@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/hcl-tools/-/hcl-tools-0.20.0.tgz#a6b13ff20d9c76ddfea159e9f70f9f36080c29ee"
+ integrity sha512-nFhexOABmO3nOEjgaO3hC6oZFJ7YkFC9WtUOEsvUSJ44oemnfovR38KMyV1SD82r2X6QhRhxJMd6G1n6/iNxKw==
+
+"@cdktf/hcl2cdk@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/hcl2cdk/-/hcl2cdk-0.20.0.tgz#8ce637e829e5b9f06777c71a25cd3210e954053f"
+ integrity sha512-H9yEC5dtu1EajFKRaSje5ZVQCxjJtAJQHk2g9njoIoUAEl9jfcG2/MbYmeKUN/FBpdyl7gYS/ZweEuVdzcOVVg==
+ dependencies:
+ "@babel/generator" "7.21.4"
+ "@babel/template" "7.20.7"
+ "@babel/types" "7.21.4"
+ "@cdktf/commons" "0.20.0"
+ "@cdktf/hcl2json" "0.20.0"
+ "@cdktf/provider-generator" "0.20.0"
+ "@cdktf/provider-schema" "0.20.0"
+ camelcase "6.3.0"
+ cdktf "0.20.0"
+ codemaker "1.93.0"
+ deep-equal "2.2.0"
+ glob "10.3.3"
+ graphology "0.25.1"
+ graphology-types "0.24.7"
+ jsii-rosetta "5.3.2"
+ prettier "2.8.7"
+ reserved-words "0.1.2"
+ zod "3.22.4"
+
+"@cdktf/hcl2json@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/hcl2json/-/hcl2json-0.20.0.tgz#4c1d3f1169165d24ac4a229504210544bd90cd9d"
+ integrity sha512-de5GaNsJqKX934EJG5xnsbpvyVf99f36/7ScK53znsbamnTJvl791fCR/tgkF+zrmZYAw0QSSEX1bHY4IPluzw==
+ dependencies:
+ fs-extra "11.2.0"
"@cdktf/node-pty-prebuilt-multiarch@0.10.1-pre.11":
version "0.10.1-pre.11"
@@ -186,35 +203,36 @@
nan "^2.14.2"
prebuild-install "^7.1.1"
-"@cdktf/provider-generator@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/provider-generator/-/provider-generator-0.19.2.tgz#142c191458b2793617a57d53785acb7823357558"
- integrity sha512-e8fY/FtvlwMupp8zGGzAeAyW5yq4jhY+azL5kfXAXck2kO7hpimKflhycGGBm2aVTuOAmsmrumgEkCa6+7vmSg==
- dependencies:
- "@cdktf/commons" "0.19.2"
- "@cdktf/hcl2json" "0.19.2"
- "@cdktf/provider-schema" "0.19.2"
- "@types/node" "18.18.8"
- codemaker "^1.91.0"
- deepmerge "^4.3.1"
- fs-extra "^8.1.0"
- jsii-srcmak "^0.1.954"
+"@cdktf/provider-generator@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/provider-generator/-/provider-generator-0.20.0.tgz#27bdb2df126f15712ed0c77345ee99d33d12fd90"
+ integrity sha512-fpThfmaYOJhQv7uVSBKBfhbFpYpH6yEOunMq9tfmlf+rJhRaJ7+o5dIwqKmcLxOGn0OOyg55PzoPVG1rhoPJ3w==
+ dependencies:
+ "@cdktf/commons" "0.20.0"
+ "@cdktf/provider-schema" "0.20.0"
+ "@types/node" "18.19.5"
+ codemaker "1.93.0"
+ fs-extra "8.1.0"
+ glob "10.3.10"
+ jsii-srcmak "0.1.999"
"@cdktf/provider-project@^0.5.0":
- version "0.5.8"
- resolved "https://registry.yarnpkg.com/@cdktf/provider-project/-/provider-project-0.5.8.tgz#11d0f87c9b02aaf705eecd89f1f820cb0c7a3248"
- integrity sha512-tWQC5L4yh9w6GVYHTWMpmzBUcRKp6sdFO39sjUqFcKTt2yLy/MCbE/YRJ/gxX3ES4K/Fc+nk05DbbbnJkISxIQ==
+ version "0.5.9"
+ resolved "https://registry.yarnpkg.com/@cdktf/provider-project/-/provider-project-0.5.9.tgz#172ee61de1335f61f8065ba19d22a87a6030aa79"
+ integrity sha512-2IWkVmsc8TL0ZK18Tp4eGxB9H26Bt2bEXonD+aLAq33gmziq6CnYRbv9R9m2+u9Q3SIEOaE0VqofcsOZlgJkSw==
dependencies:
change-case "^4.1.2"
fs-extra "^10.1.0"
-"@cdktf/provider-schema@0.19.2":
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/@cdktf/provider-schema/-/provider-schema-0.19.2.tgz#a97c07027b3293615994ed7b614e9214f45dede7"
- integrity sha512-d6YghOMsDPqQS8DRS+h5BMwg6I0QVwNi8iE9bX+pGXHa/hYggXE97sAMUGFcW3za+gSCOImHYvvKDVc3u3KsOA==
+"@cdktf/provider-schema@0.20.0":
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/@cdktf/provider-schema/-/provider-schema-0.20.0.tgz#1954ce1d7046bc4f005d9b16d589caf84ad9ea3d"
+ integrity sha512-wUK4qxRQiGLVsxoF6UcFbMhpfJpaeCwAngn43wr3LwNwTCjogdMt+0aK2YgJsnov5pnt2kK6mVHMxmWhm7pgMw==
dependencies:
- "@cdktf/commons" "0.19.2"
- fs-extra "^11.1.1"
+ "@cdktf/commons" "0.20.0"
+ "@cdktf/hcl2json" "0.20.0"
+ deepmerge "4.3.1"
+ fs-extra "11.2.0"
"@fastify/busboy@^2.0.0":
version "2.1.0"
@@ -231,7 +249,7 @@
resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c"
integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==
-"@inquirer/checkbox@^1.3.4":
+"@inquirer/checkbox@^1.3.3":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@inquirer/checkbox/-/checkbox-1.5.0.tgz#05869b4ee81e2c8d523799ef350d57cabd556bfa"
integrity sha512-3cKJkW1vIZAs4NaS0reFsnpAjP0azffYII4I2R7PTI7ZTMg5Y1at4vzXccOH3762b2c2L4drBhpJpf9uiaGNxA==
@@ -242,7 +260,7 @@
chalk "^4.1.2"
figures "^3.2.0"
-"@inquirer/confirm@^2.0.5":
+"@inquirer/confirm@^2.0.4":
version "2.0.15"
resolved "https://registry.yarnpkg.com/@inquirer/confirm/-/confirm-2.0.15.tgz#b5512ed190efd8c5b96e0969115756b48546ab36"
integrity sha512-hj8Q/z7sQXsF0DSpLQZVDhWYGN6KLM/gNjjqGkpKwBzljbQofGjn0ueHADy4HUY+OqDHmXuwk/bY+tZyIuuB0w==
@@ -251,7 +269,7 @@
"@inquirer/type" "^1.1.5"
chalk "^4.1.2"
-"@inquirer/core@^2.3.1":
+"@inquirer/core@^2.3.0":
version "2.3.1"
resolved "https://registry.yarnpkg.com/@inquirer/core/-/core-2.3.1.tgz#b7a1563ef3830a20485f551257779657e843e53f"
integrity sha512-faYAYnIfdEuns3jGKykaog5oUqFiEVbCx9nXGZfUhyEEpKcHt5bpJfZTb3eOBQKo8I/v4sJkZeBHmFlSZQuBCw==
@@ -291,7 +309,7 @@
strip-ansi "^6.0.1"
wrap-ansi "^6.2.0"
-"@inquirer/editor@^1.2.3":
+"@inquirer/editor@^1.2.2":
version "1.2.13"
resolved "https://registry.yarnpkg.com/@inquirer/editor/-/editor-1.2.13.tgz#94bddeeabc043d4a05fbde8523add4db221555d5"
integrity sha512-gBxjqt0B9GLN0j6M/tkEcmcIvB2fo9Cw0f5NRqDTkYyB9AaCzj7qvgG0onQ3GVPbMyMbbP4tWYxrBOaOdKpzNA==
@@ -301,7 +319,7 @@
chalk "^4.1.2"
external-editor "^3.1.0"
-"@inquirer/expand@^1.1.4":
+"@inquirer/expand@^1.1.3":
version "1.1.14"
resolved "https://registry.yarnpkg.com/@inquirer/expand/-/expand-1.1.14.tgz#d315014939d0bb82ed2b769907db5bd1922fb823"
integrity sha512-yS6fJ8jZYAsxdxuw2c8XTFMTvMR1NxZAw3LxDaFnqh7BZ++wTQ6rSp/2gGJhMacdZ85osb+tHxjVgx7F+ilv5g==
@@ -311,7 +329,7 @@
chalk "^4.1.2"
figures "^3.2.0"
-"@inquirer/input@^1.2.14", "@inquirer/input@^1.2.4":
+"@inquirer/input@^1.2.14", "@inquirer/input@^1.2.3":
version "1.2.14"
resolved "https://registry.yarnpkg.com/@inquirer/input/-/input-1.2.14.tgz#8951867618bb5cd16dd096e02404eec225a92207"
integrity sha512-tISLGpUKXixIQue7jypNEShrdzJoLvEvZOJ4QRsw5XTfrIYfoWFqAjMQLerGs9CzR86yAI89JR6snHmKwnNddw==
@@ -320,7 +338,7 @@
"@inquirer/type" "^1.1.5"
chalk "^4.1.2"
-"@inquirer/password@^1.1.4":
+"@inquirer/password@^1.1.3":
version "1.1.14"
resolved "https://registry.yarnpkg.com/@inquirer/password/-/password-1.1.14.tgz#c1fc139efe84a38986870a1bcf80718050f82bbf"
integrity sha512-vL2BFxfMo8EvuGuZYlryiyAB3XsgtbxOcFs4H9WI9szAS/VZCAwdVqs8rqEeaAf/GV/eZOghIOYxvD91IsRWSg==
@@ -330,22 +348,22 @@
ansi-escapes "^4.3.2"
chalk "^4.1.2"
-"@inquirer/prompts@^2.3.0":
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/@inquirer/prompts/-/prompts-2.3.1.tgz#fe430f96e510cf352efeb77af2dbd6d3049e677c"
- integrity sha512-YQeBFzIE+6fcec5N/U2mSz+IcKEG4wtGDwF7MBLIDgITWzB3o723JpKJ1rxWqdCvTXkYE+gDXK/seSN6omo3DQ==
- dependencies:
- "@inquirer/checkbox" "^1.3.4"
- "@inquirer/confirm" "^2.0.5"
- "@inquirer/core" "^2.3.1"
- "@inquirer/editor" "^1.2.3"
- "@inquirer/expand" "^1.1.4"
- "@inquirer/input" "^1.2.4"
- "@inquirer/password" "^1.1.4"
- "@inquirer/rawlist" "^1.2.4"
- "@inquirer/select" "^1.2.4"
-
-"@inquirer/rawlist@^1.2.4":
+"@inquirer/prompts@2.3.0":
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/@inquirer/prompts/-/prompts-2.3.0.tgz#b3f13d58c9c4d88b84af62ab582363fa410db8d6"
+ integrity sha512-x79tSDIZAibOl9WaBoOuyaQqNnisOO8Pk0qWyulP/nPaD/WkoRvkzk7hR4WTRmWAyE8CNbjdYgGltvd0qmvCGQ==
+ dependencies:
+ "@inquirer/checkbox" "^1.3.3"
+ "@inquirer/confirm" "^2.0.4"
+ "@inquirer/core" "^2.3.0"
+ "@inquirer/editor" "^1.2.2"
+ "@inquirer/expand" "^1.1.3"
+ "@inquirer/input" "^1.2.3"
+ "@inquirer/password" "^1.1.3"
+ "@inquirer/rawlist" "^1.2.3"
+ "@inquirer/select" "^1.2.3"
+
+"@inquirer/rawlist@^1.2.3":
version "1.2.14"
resolved "https://registry.yarnpkg.com/@inquirer/rawlist/-/rawlist-1.2.14.tgz#7fac491345a984bafad96817a4f5ae45fb6b0c96"
integrity sha512-xIYmDpYgfz2XGCKubSDLKEvadkIZAKbehHdWF082AyC2I4eHK44RUfXaoOAqnbqItZq4KHXS6jDJ78F2BmQvxg==
@@ -354,7 +372,7 @@
"@inquirer/type" "^1.1.5"
chalk "^4.1.2"
-"@inquirer/select@^1.2.4":
+"@inquirer/select@^1.2.3":
version "1.3.1"
resolved "https://registry.yarnpkg.com/@inquirer/select/-/select-1.3.1.tgz#b10bb8d4ba72f08eb887b3d948eb734d680897c6"
integrity sha512-EgOPHv7XOHEqiBwBJTyiMg9r57ySyW4oyYCumGp+pGyOaXQaLb2kTnccWI6NFd9HSi5kDJhF7YjA+3RfMQJ2JQ==
@@ -492,6 +510,25 @@
resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33"
integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==
+"@sentry-internal/tracing@7.64.0":
+ version "7.64.0"
+ resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.64.0.tgz#3e110473b8edf805b799cc91d6ee592830237bb4"
+ integrity sha512-1XE8W6ki7hHyBvX9hfirnGkKDBKNq3bDJyXS86E0bYVDl94nvbRM9BD9DHsCFetqYkVm1yDGEK+6aUVs4CztoQ==
+ dependencies:
+ "@sentry/core" "7.64.0"
+ "@sentry/types" "7.64.0"
+ "@sentry/utils" "7.64.0"
+ tslib "^2.4.1 || ^1.9.3"
+
+"@sentry-internal/tracing@7.91.0":
+ version "7.91.0"
+ resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.91.0.tgz#fbb6e1e3383e1eeee08633384e004da73ac1c37d"
+ integrity sha512-JH5y6gs6BS0its7WF2DhySu7nkhPDfZcdpAXldxzIlJpqFkuwQKLU5nkYJpiIyZz1NHYYtW5aum2bV2oCOdDRA==
+ dependencies:
+ "@sentry/core" "7.91.0"
+ "@sentry/types" "7.91.0"
+ "@sentry/utils" "7.91.0"
+
"@sentry-internal/tracing@7.92.0":
version "7.92.0"
resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.92.0.tgz#505d94a93b5df965ec6bfb35da43389988259d4d"
@@ -501,6 +538,23 @@
"@sentry/types" "7.92.0"
"@sentry/utils" "7.92.0"
+"@sentry/core@7.64.0":
+ version "7.64.0"
+ resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.64.0.tgz#9d61cdc29ba299dedbdcbe01cfadf94bd0b7df48"
+ integrity sha512-IzmEyl5sNG7NyEFiyFHEHC+sizsZp9MEw1+RJRLX6U5RITvcsEgcajSkHQFafaBPzRrcxZMdm47Cwhl212LXcw==
+ dependencies:
+ "@sentry/types" "7.64.0"
+ "@sentry/utils" "7.64.0"
+ tslib "^2.4.1 || ^1.9.3"
+
+"@sentry/core@7.91.0":
+ version "7.91.0"
+ resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.91.0.tgz#229334d7f03dd5d90a17495e61ce4215ab730b2a"
+ integrity sha512-tu+gYq4JrTdrR+YSh5IVHF0fJi/Pi9y0HZ5H9HnYy+UMcXIotxf6hIEaC6ZKGeLWkGXffz2gKpQLe/g6vy/lPA==
+ dependencies:
+ "@sentry/types" "7.91.0"
+ "@sentry/utils" "7.91.0"
+
"@sentry/core@7.92.0":
version "7.92.0"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.92.0.tgz#4e74c1959348b698226c49ead7a24e165502b55c"
@@ -509,7 +563,32 @@
"@sentry/types" "7.92.0"
"@sentry/utils" "7.92.0"
-"@sentry/node@^7.64.0", "@sentry/node@^7.77.0":
+"@sentry/node@7.64.0":
+ version "7.64.0"
+ resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.64.0.tgz#c6f7a67c1442324298f0525e7191bc18572ee1ce"
+ integrity sha512-wRi0uTnp1WSa83X2yLD49tV9QPzGh5e42IKdIDBiQ7lV9JhLILlyb34BZY1pq6p4dp35yDasDrP3C7ubn7wo6A==
+ dependencies:
+ "@sentry-internal/tracing" "7.64.0"
+ "@sentry/core" "7.64.0"
+ "@sentry/types" "7.64.0"
+ "@sentry/utils" "7.64.0"
+ cookie "^0.4.1"
+ https-proxy-agent "^5.0.0"
+ lru_map "^0.3.3"
+ tslib "^2.4.1 || ^1.9.3"
+
+"@sentry/node@7.91.0":
+ version "7.91.0"
+ resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.91.0.tgz#26bf13c3daf988f9725afd1a3cc38ba2ff90d62a"
+ integrity sha512-hTIfSQxD7L+AKIqyjoq8CWBRkEQrrMZmA3GSZgPI5JFWBHgO0HBo5TH/8TU81oEJh6kqqHAl2ObMhmcnaFqlzg==
+ dependencies:
+ "@sentry-internal/tracing" "7.91.0"
+ "@sentry/core" "7.91.0"
+ "@sentry/types" "7.91.0"
+ "@sentry/utils" "7.91.0"
+ https-proxy-agent "^5.0.0"
+
+"@sentry/node@7.92.0":
version "7.92.0"
resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.92.0.tgz#880d3be5cb8ef805a6856c619db3951b1678f726"
integrity sha512-LZeQL1r6kikEoOzA9K61OmMl32/lK/6PzmFNDH6z7UYwQopCZgVA6IP+CZuln8K2ys5c9hCyF7ICQMysXfpNJA==
@@ -520,11 +599,36 @@
"@sentry/utils" "7.92.0"
https-proxy-agent "^5.0.0"
+"@sentry/types@7.64.0":
+ version "7.64.0"
+ resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.64.0.tgz#21fc545ea05c3c8c4c3e518583eca1a8c5429506"
+ integrity sha512-LqjQprWXjUFRmzIlUjyA+KL+38elgIYmAeoDrdyNVh8MK5IC1W2Lh1Q87b4yOiZeMiIhIVNBd7Ecoh2rodGrGA==
+
+"@sentry/types@7.91.0":
+ version "7.91.0"
+ resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.91.0.tgz#5b68954e08986fecb0d4bef168df58eef62c32c7"
+ integrity sha512-bcQnb7J3P3equbCUc+sPuHog2Y47yGD2sCkzmnZBjvBT0Z1B4f36fI/5WjyZhTjLSiOdg3F2otwvikbMjmBDew==
+
"@sentry/types@7.92.0":
version "7.92.0"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.92.0.tgz#4c308fdb316c0272f55f0816230fe87e7b9b551a"
integrity sha512-APmSOuZuoRGpbPpPeYIbMSplPjiWNLZRQa73QiXuTflW4Tu/ItDlU8hOa2+A6JKVkJCuD2EN6yUrxDGSMyNXeg==
+"@sentry/utils@7.64.0":
+ version "7.64.0"
+ resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.64.0.tgz#6fe3ce9a56d3433ed32119f914907361a54cc184"
+ integrity sha512-HRlM1INzK66Gt+F4vCItiwGKAng4gqzCR4C5marsL3qv6SrKH98dQnCGYgXluSWaaa56h97FRQu7TxCk6jkSvQ==
+ dependencies:
+ "@sentry/types" "7.64.0"
+ tslib "^2.4.1 || ^1.9.3"
+
+"@sentry/utils@7.91.0":
+ version "7.91.0"
+ resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.91.0.tgz#3b1a94c053c885877908cd3e1365e3d23e21a73f"
+ integrity sha512-fvxjrEbk6T6Otu++Ax9ntlQ0sGRiwSC179w68aC3u26Wr30FAIRKqHTCCdc2jyWk7Gd9uWRT/cq+g8NG/8BfSg==
+ dependencies:
+ "@sentry/types" "7.91.0"
+
"@sentry/utils@7.92.0":
version "7.92.0"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.92.0.tgz#20ed29742594eab007f9ff72e008b5262456a319"
@@ -571,14 +675,7 @@
dependencies:
undici-types "~5.26.4"
-"@types/node@18.18.8":
- version "18.18.8"
- resolved "https://registry.yarnpkg.com/@types/node/-/node-18.18.8.tgz#2b285361f2357c8c8578ec86b5d097c7f464cfd6"
- integrity sha512-OLGBaaK5V3VRBS1bAkMVP2/W9B+H8meUfl866OrMNQqt7wDgdpWPp5o6gmIc9pB+lIQHSq4ZL8ypeH1vPxcPaQ==
- dependencies:
- undici-types "~5.26.4"
-
-"@types/node@^18":
+"@types/node@18.19.5", "@types/node@^18":
version "18.19.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.5.tgz#4b23a9ab8ab7dafebb57bcbaf5c3d8d04f9d8cac"
integrity sha512-22MG6T02Hos2JWfa1o5jsIByn+bc5iOt1IS4xyg6OG68Bu+wMonVZzdrgCw693++rpLE9RUT/Bx15BeDzO0j+g==
@@ -735,7 +832,7 @@ archiver-utils@^3.0.4:
normalize-path "^3.0.0"
readable-stream "^3.6.0"
-archiver@5.3.2, archiver@^5.3.1:
+archiver@5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.2.tgz#99991d5957e53bd0303a392979276ac4ddccf3b0"
integrity sha512-+25nxyyznAXF7Nef3y0EbBeqmGZgeN/BxHX29Rs39djAfaFalmQ89SE6CWyDCHzGL0yt/ycBtNOmGTW0FyGWNw==
@@ -760,14 +857,6 @@ arr-rotate@^1.0.0:
resolved "https://registry.yarnpkg.com/arr-rotate/-/arr-rotate-1.0.0.tgz#c11877d06a0a42beb39ab8956a06779d9b71d248"
integrity sha512-yOzOZcR9Tn7enTF66bqKorGGH0F36vcPaSWg8fO0c0UYb3LX3VMXj5ZxEqQLNOecAhlRJ7wYZja5i4jTlnbIfQ==
-array-buffer-byte-length@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead"
- integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==
- dependencies:
- call-bind "^1.0.2"
- is-array-buffer "^3.0.1"
-
array-ify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece"
@@ -898,16 +987,16 @@ camelcase-keys@^6.2.2:
map-obj "^4.0.0"
quick-lru "^4.0.1"
+camelcase@6.3.0, camelcase@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
+ integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
+
camelcase@^5.0.0, camelcase@^5.3.1:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
-camelcase@^6.3.0:
- version "6.3.0"
- resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a"
- integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==
-
capital-case@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669"
@@ -922,45 +1011,54 @@ case@^1.6.3:
resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9"
integrity sha512-mzDSXIPaFwVDvZAHqZ9VlbyF4yyXRuX6IvB06WvPYkqJVO24kX1PPhv9bfpKNFZyxYFmmgo03HUiD8iklmJYRQ==
-cdktf-cli@^0.19.0:
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/cdktf-cli/-/cdktf-cli-0.19.2.tgz#09fcab9dcc3eeca7b45c36687467b56b9d4f231a"
- integrity sha512-rwd0yOJmHecOnQsnZxsKFgBO2r1AuPw34IfKSx+FSK6H7aO13Pak+tef6tlhn7f0K79Abk2ZGD3OLs8TeG+78w==
- dependencies:
- "@cdktf/cli-core" "0.19.2"
- "@cdktf/commons" "0.19.2"
- "@cdktf/hcl2cdk" "0.19.2"
- "@cdktf/hcl2json" "0.19.2"
- "@inquirer/prompts" "^2.3.0"
- "@sentry/node" "^7.64.0"
- cdktf "0.19.2"
- ci-info "^3.8.0"
- codemaker "^1.87.0"
- constructs "^10.0.25"
- cross-spawn "^7.0.3"
- https-proxy-agent "^5.0.1"
- ink-select-input "^4.2.1"
- ink-table "^3.0.0"
- jsii "^5.1.10"
- jsii-pacmak "^1.87.0"
- minimatch "^5.1.0"
- node-fetch "^2.6.7"
- pidtree "^0.6.0"
- pidusage "^3.0.2"
- tunnel-agent "^0.6.0"
- xml-js "^1.6.11"
- yargs "^17.6"
- yoga-layout-prebuilt "^1.10.0"
- zod "^3.22.4"
-
-cdktf@0.19.2, cdktf@^0.19.0:
- version "0.19.2"
- resolved "https://registry.yarnpkg.com/cdktf/-/cdktf-0.19.2.tgz#c93b794a9c8ac6b4e50bc24e80d06d84089a8766"
- integrity sha512-FHOERDO7i2g/+pUaaZCVDKsbXEBtWYOgELL1UKjNp37DyEmtFlltdsgutVfouoil0C7W5za2IydD6sSeoH5aUw==
+cdktf-cli@^0.20.0:
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/cdktf-cli/-/cdktf-cli-0.20.0.tgz#790ea48e126770d0f1a4310b65ddadeaedd3a1d7"
+ integrity sha512-FN04OqrbUGE9VxqnGHRTWI0vq95LtoFAG/hrP7asFOmZFq5W5XBb+gdl9PgHUoajnl0WQnQHcS8xUQhKNhXiUQ==
+ dependencies:
+ "@cdktf/cli-core" "0.20.0"
+ "@cdktf/commons" "0.20.0"
+ "@cdktf/hcl-tools" "0.20.0"
+ "@cdktf/hcl2cdk" "0.20.0"
+ "@cdktf/hcl2json" "0.20.0"
+ "@inquirer/prompts" "2.3.0"
+ "@sentry/node" "7.64.0"
+ cdktf "0.20.0"
+ ci-info "3.8.0"
+ codemaker "1.93.0"
+ constructs "10.1.167"
+ cross-spawn "7.0.3"
+ https-proxy-agent "5.0.1"
+ ink-select-input "4.2.1"
+ ink-table "3.0.0"
+ jsii "5.3.2"
+ jsii-pacmak "1.93.0"
+ minimatch "5.1.0"
+ node-fetch "2.6.7"
+ pidtree "0.6.0"
+ pidusage "3.0.2"
+ tunnel-agent "0.6.0"
+ xml-js "1.6.11"
+ yargs "17.6.2"
+ yoga-layout-prebuilt "1.10.0"
+ zod "3.22.4"
+
+cdktf@0.20.0, cdktf@^0.20.0:
+ version "0.20.0"
+ resolved "https://registry.yarnpkg.com/cdktf/-/cdktf-0.20.0.tgz#fae129c9431350ebbf02a95129db6687ee323e43"
+ integrity sha512-4bCG1/7cYYGyiC++zxJ5wPUZVuxeXvZLY2BqE9heTV/PZtlcwUrV6wfUMtQFbYTCssCEJDnGrOe7Rw1Jf0Sf2w==
dependencies:
archiver "5.3.2"
- json-stable-stringify "^1.0.2"
- semver "^7.5.4"
+ json-stable-stringify "1.1.0"
+ semver "7.5.4"
+
+chalk@4.1.2, chalk@^4, chalk@^4.1.0, chalk@^4.1.2:
+ version "4.1.2"
+ resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+ integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+ dependencies:
+ ansi-styles "^4.1.0"
+ supports-color "^7.1.0"
chalk@^2.4.2:
version "2.4.2"
@@ -971,14 +1069,6 @@ chalk@^2.4.2:
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
-chalk@^4, chalk@^4.1.0, chalk@^4.1.2:
- version "4.1.2"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
- integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
- dependencies:
- ansi-styles "^4.1.0"
- supports-color "^7.1.0"
-
change-case@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12"
@@ -1002,7 +1092,7 @@ chardet@^0.7.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
-chokidar@^3.5.3:
+chokidar@3.5.3:
version "3.5.3"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd"
integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==
@@ -1022,16 +1112,21 @@ chownr@^1.1.1:
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b"
integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==
-ci-info@^2.0.0:
- version "2.0.0"
- resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
- integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
+ci-info@3.8.0:
+ version "3.8.0"
+ resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91"
+ integrity sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==
-ci-info@^3.8.0, ci-info@^3.9.0:
+ci-info@3.9.0:
version "3.9.0"
resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4"
integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==
+ci-info@^2.0.0:
+ version "2.0.0"
+ resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46"
+ integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==
+
cli-boxes@^2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
@@ -1044,12 +1139,7 @@ cli-cursor@^3.1.0:
dependencies:
restore-cursor "^3.1.0"
-cli-spinners@2.7.0:
- version "2.7.0"
- resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.7.0.tgz#f815fd30b5f9eaac02db604c7a231ed7cb2f797a"
- integrity sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw==
-
-cli-spinners@^2.3.0, cli-spinners@^2.8.0, cli-spinners@^2.9.1:
+cli-spinners@2.9.2, cli-spinners@^2.3.0, cli-spinners@^2.8.0, cli-spinners@^2.9.1:
version "2.9.2"
resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41"
integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==
@@ -1106,7 +1196,7 @@ code-excerpt@^3.0.0:
dependencies:
convert-to-spaces "^1.0.1"
-codemaker@^1.86.1, codemaker@^1.87.0, codemaker@^1.91.0, codemaker@^1.93.0:
+codemaker@1.93.0, codemaker@^1.93.0:
version "1.93.0"
resolved "https://registry.yarnpkg.com/codemaker/-/codemaker-1.93.0.tgz#4cd42eaf789678cc996581baf8ca014b505e72b4"
integrity sha512-n9AdncxhGti20YhA7HI2oAYhELh/qlDnW9JIAYQW9iULXdeaKtsxHgvcwBCltpieOcQrq10bt+sUawBs62vxLg==
@@ -1202,7 +1292,12 @@ constant-case@^3.0.4:
tslib "^2.0.3"
upper-case "^2.0.2"
-constructs@^10.0.0, constructs@^10.0.25, constructs@^10.3.0:
+constructs@10.1.167:
+ version "10.1.167"
+ resolved "https://registry.yarnpkg.com/constructs/-/constructs-10.1.167.tgz#7012962af80a53124cc8bf461184149a3e2a944a"
+ integrity sha512-zGt88EmcJUtWbd/sTM9GKcHRjYWzEx5jzMYuK69vl25Dj01sJAc7uF6AEJgZBtlLAc3VnRUvzgitHwmJkS9BFw==
+
+constructs@^10.0.0, constructs@^10.3.0:
version "10.3.0"
resolved "https://registry.yarnpkg.com/constructs/-/constructs-10.3.0.tgz#4c246fce9cf8e77711ad45944e9fbd41f1501965"
integrity sha512-vbK8i3rIb/xwZxSpTjz3SagHn1qq9BChLEfy5Hf6fB3/2eFbrwt2n9kHwQcS0CPTRBesreeAcsJfMq2229FnbQ==
@@ -1375,6 +1470,11 @@ convert-to-spaces@^1.0.1:
resolved "https://registry.yarnpkg.com/convert-to-spaces/-/convert-to-spaces-1.0.2.tgz#7e3e48bbe6d997b1417ddca2868204b4d3d85715"
integrity sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ==
+cookie@^0.4.1:
+ version "0.4.2"
+ resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432"
+ integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==
+
core-util-is@^1.0.3, core-util-is@~1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
@@ -1393,14 +1493,14 @@ crc32-stream@^4.0.2:
crc-32 "^1.2.0"
readable-stream "^3.4.0"
-cross-fetch@^3.1.5:
+cross-fetch@3.1.8:
version "3.1.8"
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.8.tgz#0327eba65fd68a7d119f8fb2bf9334a1a7956f82"
integrity sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==
dependencies:
node-fetch "^2.6.12"
-cross-spawn@^7.0.0, cross-spawn@^7.0.3:
+cross-spawn@7.0.3, cross-spawn@^7.0.0, cross-spawn@^7.0.3:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
@@ -1456,17 +1556,16 @@ decompress-response@^6.0.0:
dependencies:
mimic-response "^3.1.0"
-deep-equal@^2.2.0:
- version "2.2.3"
- resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.3.tgz#af89dafb23a396c7da3e862abc0be27cf51d56e1"
- integrity sha512-ZIwpnevOurS8bpT4192sqAowWM76JDKSHYzMLty3BZGSswgq6pBaH3DhCSW5xVAZICZyKdOBPjwww5wfgT/6PA==
+deep-equal@2.2.0:
+ version "2.2.0"
+ resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.0.tgz#5caeace9c781028b9ff459f33b779346637c43e6"
+ integrity sha512-RdpzE0Hv4lhowpIUKKMJfeH6C1pXdtT1/it80ubgWqwI3qpuxUBpC1S4hnHg+zjnuOoDkzUtUCEEkG+XG5l3Mw==
dependencies:
- array-buffer-byte-length "^1.0.0"
- call-bind "^1.0.5"
- es-get-iterator "^1.1.3"
- get-intrinsic "^1.2.2"
+ call-bind "^1.0.2"
+ es-get-iterator "^1.1.2"
+ get-intrinsic "^1.1.3"
is-arguments "^1.1.1"
- is-array-buffer "^3.0.2"
+ is-array-buffer "^3.0.1"
is-date-object "^1.0.5"
is-regex "^1.1.4"
is-shared-array-buffer "^1.0.2"
@@ -1474,18 +1573,18 @@ deep-equal@^2.2.0:
object-is "^1.1.5"
object-keys "^1.1.1"
object.assign "^4.1.4"
- regexp.prototype.flags "^1.5.1"
+ regexp.prototype.flags "^1.4.3"
side-channel "^1.0.4"
which-boxed-primitive "^1.0.2"
which-collection "^1.0.1"
- which-typed-array "^1.1.13"
+ which-typed-array "^1.1.9"
deep-extend@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
-deepmerge@^4.3.1:
+deepmerge@4.3.1:
version "4.3.1"
resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a"
integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==
@@ -1533,7 +1632,7 @@ detect-newline@^3.1.0:
resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651"
integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==
-detect-port@^1.5.1:
+detect-port@1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.5.1.tgz#451ca9b6eaf20451acb0799b8ab40dff7718727b"
integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==
@@ -1607,7 +1706,7 @@ error-ex@^1.3.1:
dependencies:
is-arrayish "^0.2.1"
-es-get-iterator@^1.1.3:
+es-get-iterator@^1.1.2:
version "1.1.3"
resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6"
integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==
@@ -1652,7 +1751,7 @@ events@^3.3.0:
resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
-execa@^5.1.1:
+execa@5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd"
integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==
@@ -1681,7 +1780,7 @@ external-editor@^3.1.0:
iconv-lite "^0.4.24"
tmp "^0.0.33"
-extract-zip@^2.0.1:
+extract-zip@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a"
integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg==
@@ -1776,7 +1875,7 @@ flatted@^3.2.7:
resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf"
integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==
-follow-redirects@^1.15.2, follow-redirects@^1.15.3:
+follow-redirects@1.15.4:
version "1.15.4"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.4.tgz#cdc7d308bf6493126b17ea2191ea0ccf3e535adf"
integrity sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==
@@ -1801,16 +1900,7 @@ fs-constants@^1.0.0:
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
-fs-extra@^10.1.0:
- version "10.1.0"
- resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
- integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==
- dependencies:
- graceful-fs "^4.2.0"
- jsonfile "^6.0.1"
- universalify "^2.0.0"
-
-fs-extra@^11.1.1:
+fs-extra@11.2.0:
version "11.2.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b"
integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==
@@ -1819,7 +1909,7 @@ fs-extra@^11.1.1:
jsonfile "^6.0.1"
universalify "^2.0.0"
-fs-extra@^8.1.0:
+fs-extra@8.1.0, fs-extra@^8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0"
integrity sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==
@@ -1828,6 +1918,15 @@ fs-extra@^8.1.0:
jsonfile "^4.0.0"
universalify "^0.1.0"
+fs-extra@^10.1.0:
+ version "10.1.0"
+ resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
+ integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==
+ dependencies:
+ graceful-fs "^4.2.0"
+ jsonfile "^6.0.1"
+ universalify "^2.0.0"
+
fs-extra@^9.1.0:
version "9.1.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d"
@@ -1948,7 +2047,7 @@ glob-promise@^6.0.5:
dependencies:
"@types/glob" "^8.0.0"
-glob@^10.3.3:
+glob@10.3.10:
version "10.3.10"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b"
integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==
@@ -1959,6 +2058,17 @@ glob@^10.3.3:
minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
path-scurry "^1.10.1"
+glob@10.3.3:
+ version "10.3.3"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.3.tgz#8360a4ffdd6ed90df84aa8d52f21f452e86a123b"
+ integrity sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==
+ dependencies:
+ foreground-child "^3.1.0"
+ jackspeak "^2.0.3"
+ minimatch "^9.0.1"
+ minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
+ path-scurry "^1.10.1"
+
glob@^7.0.0, glob@^7.1.4, glob@^7.2.3:
version "7.2.3"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
@@ -1994,15 +2104,15 @@ graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0:
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
-graphology-types@^0.24.7:
+graphology-types@0.24.7:
version "0.24.7"
resolved "https://registry.yarnpkg.com/graphology-types/-/graphology-types-0.24.7.tgz#7d630a800061666bfa70066310f56612e08b7bee"
integrity sha512-tdcqOOpwArNjEr0gNQKCXwaNCWnQJrog14nJNQPeemcLnXQUUGrsCWpWkVKt46zLjcS6/KGoayeJfHHyPDlvwA==
-graphology@^0.25.1:
- version "0.25.4"
- resolved "https://registry.yarnpkg.com/graphology/-/graphology-0.25.4.tgz#e528a64555ac1f392a9d965321ada5b2b843efe1"
- integrity sha512-33g0Ol9nkWdD6ulw687viS8YJQBxqG5LWII6FI6nul0pq6iM2t5EKquOTFDbyTblRB3O9I+7KX4xI8u5ffekAQ==
+graphology@0.25.1:
+ version "0.25.1"
+ resolved "https://registry.yarnpkg.com/graphology/-/graphology-0.25.1.tgz#f92b86294782522d3898ce4480e4a577c0c2568a"
+ integrity sha512-yYA7BJCcXN2DrKNQQ9Qf22zBHm/yTbyBR71T1MYBbGtywNHsv0QZtk8zaR6zxNcp2hCCZayUkHp9DyMSZCpoxQ==
dependencies:
events "^3.3.0"
obliterator "^2.0.2"
@@ -2095,7 +2205,7 @@ hosted-git-info@^4.0.0, hosted-git-info@^4.0.1:
dependencies:
lru-cache "^6.0.0"
-https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1:
+https-proxy-agent@5.0.1, https-proxy-agent@^5.0.0:
version "5.0.1"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6"
integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==
@@ -2120,7 +2230,7 @@ ieee754@^1.1.13:
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==
-indent-string@^4.0.0:
+indent-string@4.0.0, indent-string@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==
@@ -2148,7 +2258,16 @@ ini@^2.0.0:
resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
-ink-select-input@^4.2.1:
+ink-select-input@4.2.1:
+ version "4.2.1"
+ resolved "https://registry.yarnpkg.com/ink-select-input/-/ink-select-input-4.2.1.tgz#121108ccbcb42aa619f9f0baedb796c24c971a2a"
+ integrity sha512-WvlrYdwmdnD6/nE/9mNhaaanTQOKmwy/hT/vuAqbDec3PUQBQ8Pkwszii/8eGvDTx5bGiUHu18P9D5IoB/ERaw==
+ dependencies:
+ arr-rotate "^1.0.0"
+ figures "^3.2.0"
+ lodash.isequal "^4.5.0"
+
+ink-select-input@4.2.2:
version "4.2.2"
resolved "https://registry.yarnpkg.com/ink-select-input/-/ink-select-input-4.2.2.tgz#484672a648b6b214052de721f02a645500cb75d3"
integrity sha512-E5AS2Vnd4CSzEa7Rm+hG47wxRQo1ASfh4msKxO7FHmn/ym+GKSSsFIfR+FonqjKNDPXYJClw8lM47RdN3Pi+nw==
@@ -2157,31 +2276,31 @@ ink-select-input@^4.2.1:
figures "^3.2.0"
lodash.isequal "^4.5.0"
-ink-spinner@^4.0.3:
+ink-spinner@4.0.3:
version "4.0.3"
resolved "https://registry.yarnpkg.com/ink-spinner/-/ink-spinner-4.0.3.tgz#0d0f4a787ae1a4270928e063d9c52527cb264feb"
integrity sha512-uJ4nbH00MM9fjTJ5xdw0zzvtXMkeGb0WV6dzSWvFv2/+ks6FIhpkt+Ge/eLdh0Ah6Vjw5pLMyNfoHQpRDRVFbQ==
dependencies:
cli-spinners "^2.3.0"
-ink-table@^3.0.0:
- version "3.1.0"
- resolved "https://registry.yarnpkg.com/ink-table/-/ink-table-3.1.0.tgz#d450624e344702b5cb7cadcb25783c58555c7c54"
- integrity sha512-qxVb4DIaEaJryvF9uZGydnmP9Hkmas3DCKVpEcBYC0E4eJd3qNgNe+PZKuzgCERFe9LfAS1TNWxCr9+AU4v3YA==
+ink-table@3.0.0:
+ version "3.0.0"
+ resolved "https://registry.yarnpkg.com/ink-table/-/ink-table-3.0.0.tgz#109fb2ce0709567f0e38d14b2b82f311277a3628"
+ integrity sha512-RtcYjenHKZWjnwVNQ6zSYWMOLKwkWscDAJsqUQXftyjkYho1gGrluGss87NOoIzss0IKr74lKasd6MtlQYALiA==
dependencies:
object-hash "^2.0.3"
-ink-testing-library@^2.1.0:
+ink-testing-library@2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/ink-testing-library/-/ink-testing-library-2.1.0.tgz#b5ffd1ef1049550ae4d2f008b8770e7ece6e0313"
integrity sha512-7TNlOjJlJXB33vG7yVa+MMO7hCjaC1bCn+zdpSjknWoLbOWMaFdKc7LJvqVkZ0rZv2+akhjXPrcR/dbxissjUw==
-ink-use-stdout-dimensions@^1.0.5:
+ink-use-stdout-dimensions@1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/ink-use-stdout-dimensions/-/ink-use-stdout-dimensions-1.0.5.tgz#7739876c00284840601c4150aa84eb7adc143de2"
integrity sha512-rVsqnw4tQEAJUoknU09+zHdDf30GJdkumkHr0iz/TOYMYEZJkYqziQSGJAM+Z+M603EDfO89+Nxyn/Ko2Zknfw==
-ink@^3.2.0:
+ink@3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/ink/-/ink-3.2.0.tgz#434793630dc57d611c8fe8fffa1db6b56f1a16bb"
integrity sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg==
@@ -2232,7 +2351,7 @@ is-arguments@^1.1.1:
call-bind "^1.0.2"
has-tostringtag "^1.0.0"
-is-array-buffer@^3.0.1, is-array-buffer@^3.0.2:
+is-array-buffer@^3.0.1:
version "3.0.2"
resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe"
integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==
@@ -2396,7 +2515,7 @@ is-typed-array@^1.1.10:
dependencies:
which-typed-array "^1.1.11"
-is-valid-domain@^0.1.6:
+is-valid-domain@0.1.6:
version "0.1.6"
resolved "https://registry.yarnpkg.com/is-valid-domain/-/is-valid-domain-0.1.6.tgz#3c85469d2938f170c8f82ce6e52df8ad9fca8105"
integrity sha512-ZKtq737eFkZr71At8NxOFcP9O1K89gW3DkdrGMpp1upr/ueWjj+Weh4l9AI4rN0Gt8W2M1w7jrG2b/Yv83Ljpg==
@@ -2438,7 +2557,7 @@ isexe@^2.0.0:
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
-jackspeak@^2.3.5:
+jackspeak@^2.0.3, jackspeak@^2.3.5:
version "2.3.6"
resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8"
integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==
@@ -2491,7 +2610,7 @@ jsii-docgen@^10.2.3:
semver "^7.5.4"
yargs "^16.2.0"
-jsii-pacmak@^1.87.0, jsii-pacmak@^1.93.0:
+jsii-pacmak@1.93.0, jsii-pacmak@^1.93.0:
version "1.93.0"
resolved "https://registry.yarnpkg.com/jsii-pacmak/-/jsii-pacmak-1.93.0.tgz#5793c251cb45963d57bc839cf8bbe64d8e5b998f"
integrity sha512-A2rn4seHN+1/VzwQ0H8t6zxAz9HpZWbF+kVi9MpNgqd2iiNYxS1XNyirzyQ8D3e5ZNWoPAyFVuGqkXrtdo4etg==
@@ -2522,25 +2641,7 @@ jsii-reflect@^1.93.0:
oo-ascii-tree "^1.93.0"
yargs "^16.2.0"
-jsii-rosetta@^1.93.0:
- version "1.93.0"
- resolved "https://registry.yarnpkg.com/jsii-rosetta/-/jsii-rosetta-1.93.0.tgz#951e8ae27ceaf0504abd74c15866f6050c97ef82"
- integrity sha512-5HFoC6Cp3Y3usCGuTRDTL/ovgz9MxI6/kY4Re8agVShXR6MPSX6F6Sc1qGMUjf3ynFfPz+DMsBY0Z164cxVKBA==
- dependencies:
- "@jsii/check-node" "1.93.0"
- "@jsii/spec" "1.93.0"
- "@xmldom/xmldom" "^0.8.10"
- commonmark "^0.30.0"
- fast-glob "^3.3.2"
- jsii "1.93.0"
- semver "^7.5.4"
- semver-intersect "^1.4.0"
- stream-json "^1.8.0"
- typescript "~3.9.10"
- workerpool "^6.5.1"
- yargs "^16.2.0"
-
-jsii-rosetta@^5.1.9:
+jsii-rosetta@5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/jsii-rosetta/-/jsii-rosetta-5.3.2.tgz#2683f16cfd035277dc9843e9087556827938ce56"
integrity sha512-pY5wm72XcT9RdTWh/UpIVnyLScz381GtmlW4ey9ojJdHUoxvtni0vdGdSl+ZaojqEQR7TLdUM+ocLgB8Xnujxw==
@@ -2559,6 +2660,24 @@ jsii-rosetta@^5.1.9:
workerpool "^6.5.1"
yargs "^17.7.2"
+jsii-rosetta@^1.93.0:
+ version "1.93.0"
+ resolved "https://registry.yarnpkg.com/jsii-rosetta/-/jsii-rosetta-1.93.0.tgz#951e8ae27ceaf0504abd74c15866f6050c97ef82"
+ integrity sha512-5HFoC6Cp3Y3usCGuTRDTL/ovgz9MxI6/kY4Re8agVShXR6MPSX6F6Sc1qGMUjf3ynFfPz+DMsBY0Z164cxVKBA==
+ dependencies:
+ "@jsii/check-node" "1.93.0"
+ "@jsii/spec" "1.93.0"
+ "@xmldom/xmldom" "^0.8.10"
+ commonmark "^0.30.0"
+ fast-glob "^3.3.2"
+ jsii "1.93.0"
+ semver "^7.5.4"
+ semver-intersect "^1.4.0"
+ stream-json "^1.8.0"
+ typescript "~3.9.10"
+ workerpool "^6.5.1"
+ yargs "^16.2.0"
+
jsii-rosetta@~5.2.0:
version "5.2.6"
resolved "https://registry.yarnpkg.com/jsii-rosetta/-/jsii-rosetta-5.2.6.tgz#a5c6d37cff1992f7089f0766c22aea3e33c1c46c"
@@ -2578,7 +2697,7 @@ jsii-rosetta@~5.2.0:
workerpool "^6.5.1"
yargs "^17.7.2"
-jsii-srcmak@^0.1.951, jsii-srcmak@^0.1.954:
+jsii-srcmak@0.1.999:
version "0.1.999"
resolved "https://registry.yarnpkg.com/jsii-srcmak/-/jsii-srcmak-0.1.999.tgz#8cbfd975e87749153878bf21e29076547d83b37c"
integrity sha512-8jhGRjceKdvYlW3rujnrZWTa1bss7TUhcsVrRsT7Q+MDYxRZan0FsqyHKrjfb8GYpgSh5DVpc9iYCwmn6VgXsw==
@@ -2608,7 +2727,26 @@ jsii@1.93.0:
typescript "~3.9.10"
yargs "^16.2.0"
-jsii@^5.1.10, jsii@~5.3.0:
+jsii@5.3.2:
+ version "5.3.2"
+ resolved "https://registry.yarnpkg.com/jsii/-/jsii-5.3.2.tgz#3dc65c39dea3fb4e2f77fd7b48be5d3fef585962"
+ integrity sha512-wwwp47+6orlMXpny4dlTOP6776cBo2WFDgxZyGjQaV4VWNydsJiTcinuJzCj1XVZicBhpAnkuBMr89+2aT8Dcg==
+ dependencies:
+ "@jsii/check-node" "1.93.0"
+ "@jsii/spec" "^1.93.0"
+ case "^1.6.3"
+ chalk "^4"
+ downlevel-dts "^0.11.0"
+ fast-deep-equal "^3.1.3"
+ log4js "^6.9.1"
+ semver "^7.5.4"
+ semver-intersect "^1.5.0"
+ sort-json "^2.0.1"
+ spdx-license-list "^6.8.0"
+ typescript "~5.3"
+ yargs "^17.7.2"
+
+jsii@5.3.3, jsii@~5.3.0:
version "5.3.3"
resolved "https://registry.yarnpkg.com/jsii/-/jsii-5.3.3.tgz#49e12615543c9e0a6cbd2ed82dae347eb993c10c"
integrity sha512-M+kAUKJiLXXJXKYmBB0Q2n1aGoeNHyzMCLAx7402JqXSLxH4JGh6kOf4EH3U3LmQKzv2kxOHMRCg3Ssh82KtrQ==
@@ -2680,7 +2818,7 @@ json-schema-traverse@^1.0.0:
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
-json-stable-stringify@^1.0.2:
+json-stable-stringify@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.1.0.tgz#43d39c7c8da34bfaf785a61a56808b0def9f747d"
integrity sha512-zfA+5SuwYN2VWqN1/5HZaDzQKLJHaBVMZIIM+wuYjdptkaQsqzDdqjqf+lZZJUuJq1aanHiY8LhH8LmH+qBYJA==
@@ -2793,7 +2931,7 @@ lodash.flatten@^4.4.0:
resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==
-lodash.isequal@^4.5.0:
+lodash.isequal@4.5.0, lodash.isequal@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0"
integrity sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==
@@ -2818,7 +2956,7 @@ lodash@^4.17.15, lodash@^4.17.20:
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
-log4js@^6.7.0, log4js@^6.9.1:
+log4js@6.9.1, log4js@^6.9.1:
version "6.9.1"
resolved "https://registry.yarnpkg.com/log4js/-/log4js-6.9.1.tgz#aba5a3ff4e7872ae34f8b4c533706753709e38b6"
integrity sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==
@@ -2855,6 +2993,11 @@ lru-cache@^6.0.0:
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.1.0.tgz#2098d41c2dc56500e6c88584aa656c84de7d0484"
integrity sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==
+lru_map@^0.3.3:
+ version "0.3.3"
+ resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd"
+ integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ==
+
map-obj@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
@@ -2920,20 +3063,27 @@ min-indent@^1.0.0:
resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.1.tgz#a63f681673b30571fbe8bc25686ae746eefa9869"
integrity sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==
-minimatch@^3.0.4, minimatch@^3.1.1:
- version "3.1.2"
- resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
- integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+minimatch@5.1.0:
+ version "5.1.0"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7"
+ integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==
dependencies:
- brace-expansion "^1.1.7"
+ brace-expansion "^2.0.1"
-minimatch@^5.0.1, minimatch@^5.1.0:
+minimatch@5.1.6, minimatch@^5.0.1, minimatch@^5.1.0:
version "5.1.6"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.6.tgz#1cfcb8cf5522ea69952cd2af95ae09477f122a96"
integrity sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==
dependencies:
brace-expansion "^2.0.1"
+minimatch@^3.0.4, minimatch@^3.1.1:
+ version "3.1.2"
+ resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+ integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+ dependencies:
+ brace-expansion "^1.1.7"
+
minimatch@^9.0.1:
version "9.0.3"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825"
@@ -3015,7 +3165,14 @@ node-abi@^3.3.0:
dependencies:
semver "^7.3.5"
-node-fetch@^2.6.12, node-fetch@^2.6.7:
+node-fetch@2.6.7:
+ version "2.6.7"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
+ integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
+ dependencies:
+ whatwg-url "^5.0.0"
+
+node-fetch@2.7.0, node-fetch@^2.6.12:
version "2.7.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
@@ -3116,7 +3273,7 @@ oo-ascii-tree@^1.93.0:
resolved "https://registry.yarnpkg.com/oo-ascii-tree/-/oo-ascii-tree-1.93.0.tgz#fbe47cd5d188353e6d4cc7cad8520ac790bd3ef4"
integrity sha512-zbmrGCL/UsvxV2WlnsSrqdkdxEggxH7eA1HOk+hmimLQu+eLO4Y3VGqwt0VK04Nfe6iG6GnzRL5/XjH0j1v8bQ==
-open@^7.4.2:
+open@7.4.2:
version "7.4.2"
resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321"
integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==
@@ -3196,7 +3353,7 @@ param-case@^3.0.4:
dot-case "^3.0.4"
tslib "^2.0.3"
-parse-gitignore@^1.0.1:
+parse-gitignore@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/parse-gitignore/-/parse-gitignore-1.0.1.tgz#8b9dc57f17b810d495c5dfa62eb07caffe7758c7"
integrity sha512-UGyowyjtx26n65kdAMWhm6/3uy5uSrpcuH7tt+QEVudiBoVS+eqHxD5kbi9oWVRwj7sCzXqwuM+rUGw7earl6A==
@@ -3290,12 +3447,12 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1:
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
-pidtree@^0.6.0:
+pidtree@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.6.0.tgz#90ad7b6d42d5841e69e0a2419ef38f8883aa057c"
integrity sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==
-pidusage@^3.0.2:
+pidusage@3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/pidusage/-/pidusage-3.0.2.tgz#6faa5402b2530b3af2cf93d13bcf202889724a53"
integrity sha512-g0VU+y08pKw5M8EZ2rIGiEBaB8wrQMjYGFfW2QVIfyT8V+fq8YFLkvlz4bz5ljvFDJYNFCWT3PWqcRr2FKO81w==
@@ -3312,7 +3469,7 @@ pify@^3.0.0:
resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==
-pkg-up@^3.1.0:
+pkg-up@3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5"
integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==
@@ -3337,20 +3494,20 @@ prebuild-install@^7.1.1:
tar-fs "^2.0.0"
tunnel-agent "^0.6.0"
-prettier@^2.8.6:
- version "2.8.8"
- resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da"
- integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==
+prettier@2.8.7:
+ version "2.8.7"
+ resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.7.tgz#bb79fc8729308549d28fe3a98fce73d2c0656450"
+ integrity sha512-yPngTo3aXUUmyuTjeTUT75txrf+aMh9FiD7q9ZE/i6r0bPb22g4FsE6Y338PQX1bmfy08i9QQCB7/rcUAVntfw==
process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
-projen@^0.78.11:
- version "0.78.11"
- resolved "https://registry.yarnpkg.com/projen/-/projen-0.78.11.tgz#e54566f72e9d18fdc4002d2704555fdb100b84d4"
- integrity sha512-SfAMFJcXNWwFbt/HYS/bnYiBQK2nS33bp0tWV3j3Eb6mTiiwyEAjMF5l1rp9Fi+zZ1rV6ai2el2kaVQmcl3K3A==
+projen@^0.78.13:
+ version "0.78.13"
+ resolved "https://registry.yarnpkg.com/projen/-/projen-0.78.13.tgz#5c6693ababa4f2e7d93759c722a35d41c9c2e691"
+ integrity sha512-ihL1lcfmi7M0EA7qgdXGja2SLLc6vtsQ1Wd2RqqxOuFnchw9/kZubdrxy38J8iI36AEwb0Qucb1uLTuwFhl7Qw==
dependencies:
"@iarna/toml" "^2.2.5"
case "^1.6.3"
@@ -3509,7 +3666,7 @@ redent@^3.0.0:
indent-string "^4.0.0"
strip-indent "^3.0.0"
-regexp.prototype.flags@^1.5.1:
+regexp.prototype.flags@^1.4.3:
version "1.5.1"
resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e"
integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==
@@ -3538,7 +3695,7 @@ require-main-filename@^2.0.0:
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
-reserved-words@^0.1.2:
+reserved-words@0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/reserved-words/-/reserved-words-0.1.2.tgz#00a0940f98cd501aeaaac316411d9adc52b31ab1"
integrity sha512-0S5SrIUJ9LfpbVl4Yzij6VipUdafHrOTzvmfazSw/jeZrZtQK303OPZW+obtkaw7jQlTQppy0UvZWm9872PbRw==
@@ -3622,18 +3779,18 @@ semver-intersect@^1.4.0, semver-intersect@^1.5.0:
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
-semver@^6.0.0, semver@^6.3.0:
- version "6.3.1"
- resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
- integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
-
-semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.5.3, semver@^7.5.4:
+semver@7.5.4, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.5.3, semver@^7.5.4:
version "7.5.4"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==
dependencies:
lru-cache "^6.0.0"
+semver@^6.0.0, semver@^6.3.0:
+ version "6.3.1"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
+ integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
+
sentence-case@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-3.0.4.tgz#3645a7b8c117c787fde8702056225bb62a45131f"
@@ -3815,7 +3972,7 @@ sprintf-js@~1.0.2:
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
-sscaff@^1.2.274:
+sscaff@1.2.274:
version "1.2.274"
resolved "https://registry.yarnpkg.com/sscaff/-/sscaff-1.2.274.tgz#3ae52042fbeb244b01b89542a56ce5b247284be9"
integrity sha512-sztRa50SL1LVxZnF1au6QT1SC2z0S1oEOyi2Kpnlg6urDns93aL32YxiJcNkLcY+VHFtVqm/SRv4cb+6LeoBQA==
@@ -3854,7 +4011,7 @@ stop-iteration-iterator@^1.0.0:
dependencies:
internal-slot "^1.0.4"
-stream-buffers@^3.0.2:
+stream-buffers@3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/stream-buffers/-/stream-buffers-3.0.2.tgz#5249005a8d5c2d00b3a32e6e0a6ea209dc4f3521"
integrity sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ==
@@ -3922,7 +4079,7 @@ stringify-package@^1.0.1:
resolved "https://registry.yarnpkg.com/stringify-package/-/stringify-package-1.0.1.tgz#e5aa3643e7f74d0f28628b72f3dad5cecfc3ba85"
integrity sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg==
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@6.0.1, strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -4052,12 +4209,12 @@ trim-newlines@^3.0.0:
resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144"
integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==
-tslib@^2.0.3:
+tslib@^2.0.3, "tslib@^2.4.1 || ^1.9.3":
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
-tunnel-agent@^0.6.0:
+tunnel-agent@0.6.0, tunnel-agent@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd"
integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==
@@ -4172,12 +4329,12 @@ util-deprecate@^1.0.1, util-deprecate@~1.0.1:
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
-uuid@^8.3.2:
+uuid@8.3.2, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
-uuid@^9.0.1:
+uuid@9.0.1:
version "9.0.1"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30"
integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==
@@ -4229,7 +4386,7 @@ which-module@^2.0.0:
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.1.tgz#776b1fe35d90aebe99e8ac15eb24093389a4a409"
integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==
-which-typed-array@^1.1.11, which-typed-array@^1.1.13:
+which-typed-array@^1.1.11, which-typed-array@^1.1.9:
version "1.1.13"
resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36"
integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==
@@ -4301,7 +4458,7 @@ ws@^7, ws@^7.5.5:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
-xml-js@^1.6.11:
+xml-js@1.6.11:
version "1.6.11"
resolved "https://registry.yarnpkg.com/xml-js/-/xml-js-1.6.11.tgz#927d2f6947f7f1c19a316dd8eea3614e8b18f8e9"
integrity sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==
@@ -4323,7 +4480,7 @@ xmlbuilder@^15.1.1:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-15.1.1.tgz#9dcdce49eea66d8d10b42cae94a79c3c8d0c2ec5"
integrity sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==
-xstate@^4.34.0:
+xstate@4.38.3:
version "4.38.3"
resolved "https://registry.yarnpkg.com/xstate/-/xstate-4.38.3.tgz#4e15e7ad3aa0ca1eea2010548a5379966d8f1075"
integrity sha512-SH7nAaaPQx57dx6qvfcIgqKRXIh4L0A1iYEqim4s1u7c9VoCgzZc+63FY90AKU4ZzOC2cfJzTnpO4zK7fCUzzw==
@@ -4371,6 +4528,32 @@ yargs-parser@^21.1.1:
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35"
integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==
+yargs@17.6.2:
+ version "17.6.2"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.6.2.tgz#2e23f2944e976339a1ee00f18c77fedee8332541"
+ integrity sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==
+ dependencies:
+ cliui "^8.0.1"
+ escalade "^3.1.1"
+ get-caller-file "^2.0.5"
+ require-directory "^2.1.1"
+ string-width "^4.2.3"
+ y18n "^5.0.5"
+ yargs-parser "^21.1.1"
+
+yargs@17.7.2, yargs@^17.7.2:
+ version "17.7.2"
+ resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269"
+ integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==
+ dependencies:
+ cliui "^8.0.1"
+ escalade "^3.1.1"
+ get-caller-file "^2.0.5"
+ require-directory "^2.1.1"
+ string-width "^4.2.3"
+ y18n "^5.0.5"
+ yargs-parser "^21.1.1"
+
yargs@^15.4.1:
version "15.4.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8"
@@ -4401,19 +4584,6 @@ yargs@^16.0.0, yargs@^16.2.0:
y18n "^5.0.5"
yargs-parser "^20.2.2"
-yargs@^17.6, yargs@^17.7.2:
- version "17.7.2"
- resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269"
- integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==
- dependencies:
- cliui "^8.0.1"
- escalade "^3.1.1"
- get-caller-file "^2.0.5"
- require-directory "^2.1.1"
- string-width "^4.2.3"
- y18n "^5.0.5"
- yargs-parser "^21.1.1"
-
yauzl@^2.10.0:
version "2.10.0"
resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9"
@@ -4427,7 +4597,7 @@ yocto-queue@^0.1.0:
resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
-yoga-layout-prebuilt@^1.10.0, yoga-layout-prebuilt@^1.9.6:
+yoga-layout-prebuilt@1.10.0, yoga-layout-prebuilt@^1.9.6:
version "1.10.0"
resolved "https://registry.yarnpkg.com/yoga-layout-prebuilt/-/yoga-layout-prebuilt-1.10.0.tgz#2936fbaf4b3628ee0b3e3b1df44936d6c146faa6"
integrity sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g==
@@ -4443,7 +4613,7 @@ zip-stream@^4.1.0:
compress-commons "^4.1.2"
readable-stream "^3.6.0"
-zod@^3.22.4:
+zod@3.22.4:
version "3.22.4"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"
integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==