Merge pull request #4 from ambarltd/latest
Latest Ambar API changes
tjschutte authored Feb 5, 2024
2 parents 7f419b7 + b795f3d commit da04fa4
Showing 15 changed files with 134 additions and 159 deletions.
1 change: 1 addition & 0 deletions .github/terraform_logo_dark.svg
1 change: 1 addition & 0 deletions .github/terraform_logo_light.svg
6 changes: 6 additions & 0 deletions .github/workflows/test.yml
@@ -49,7 +49,10 @@ jobs:
(echo; echo "Unexpected difference in directories after code generation. Run 'go generate ./...' command and commit."; exit 1)
# Run acceptance tests in a matrix with Terraform CLI versions
# This will create real resources, so we will only run this on the main branch
# when a PR has been accepted
test:
if: github.ref == 'refs/heads/main'
name: Terraform Provider Acceptance Tests
needs: build
runs-on: ubuntu-latest
@@ -65,6 +68,9 @@ jobs:
- '1.2.*'
- '1.3.*'
- '1.4.*'
- '1.5.*'
- '1.6.*'
- '1.7.*'
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0
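The workflow changes above gate the acceptance-test job on `main` (`if: github.ref == 'refs/heads/main'`) and extend the Terraform CLI matrix through 1.7. A minimal sketch of how a job like this typically consumes that matrix follows; the setup-terraform step, test path, and environment variables are assumptions drawn from common terraform-plugin scaffolding, not from this diff.

```yaml
# Hedged sketch of the acceptance-test job consuming the Terraform version
# matrix shown above. Step details are assumptions, not taken from this diff.
jobs:
  test:
    if: github.ref == 'refs/heads/main'   # only after a PR lands on main
    name: Terraform Provider Acceptance Tests
    needs: build
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        terraform:
          - '1.5.*'
          - '1.6.*'
          - '1.7.*'
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version-file: 'go.mod'
      - uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: ${{ matrix.terraform }}
          terraform_wrapper: false
      - env:
          TF_ACC: "1"   # acceptance tests create real resources
        run: go test -v -cover ./internal/provider/
```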
8 changes: 7 additions & 1 deletion CHANGELOG.md
@@ -1,5 +1,11 @@
## 1.0.0 (Initial Release)
## 1.0.1
FEATURES:
* Removed DataDestination DestinationName field
* Removed DataSource top-level fields; they should instead be passed as part of the DataSourceConfig map (a migration sketch follows this changelog section)
* Updated provider to use the latest Ambar SDK
* Minor improvements to debug logging

## 1.0.0 (Initial Release)
FEATURES:
* Ambar initial Terraform support.
* Support for Ambar DataSource resources like the Postgres DataSourceType
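The 1.0.1 entries above describe moving connection details from top-level DataSource arguments into the `data_source_config` map and dropping the DataDestination `destination_name` field. A minimal before/after sketch of the DataSource change, abridged from the removed and added example lines later in this diff:

```terraform
# Before (1.0.0): connection details were top-level resource arguments
# (abridged; reconstructed from the removed example lines in this diff).
resource "ambar_data_source" "example_data_source" {
  data_source_type    = "postgres"
  description         = "My Terraform DataSource"
  partitioning_column = "partition"
  serial_column       = "serial"
  username            = "username"
  password            = "password"
}

# After (1.0.1): the same details move into the data_source_config map.
resource "ambar_data_source" "example_data_source" {
  data_source_type = "postgres"
  description      = "My Terraform Postgres DataSource"
  data_source_config = {
    # plus hostname, hostPort, databaseName, tableName, publicationName,
    # columns, etc. -- see the full examples further down in this diff
    "username"           : "username",
    "password"           : "password",
    "partitioningColumn" : "partition",
    "serialColumn"       : "serial"
  }
}
```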
2 changes: 1 addition & 1 deletion README.md
@@ -11,7 +11,7 @@

The [Ambar Provider](https://registry.terraform.io/providers/ambarltd/ambar/latest/docs) allows [Terraform](https://terraform.io) to manage [Ambar](https://ambar.cloud) resources.

- [Contributing guide] *coming soon*
- [Contributing guide / Roadmap] *coming soon*
- [FAQ] *coming soon*
- [Tutorials and Examples] *coming soon*
- [Help and Support] *coming soon*
2 changes: 0 additions & 2 deletions docs/resources/data_destination.md
@@ -19,7 +19,6 @@ resource "ambar_data_destination" "example_destination" {
]
description = "My Terraform DataDestination"
destination_endpoint = "https://1.2.3.4.com/data"
destination_name = "ambar-dest"
username = "username"
password = "password"
}
@@ -37,7 +36,6 @@ resource "ambar_data_destination" "example_destination" {

- `description` (String) A user-friendly description of this DataDestination. Use the description field to help augment information about this DataDestination which may not be apparent from describing the resource, such as details about the filtered record sequences being sent.
- `destination_endpoint` (String) The HTTP endpoint where Ambar will send your filtered record sequences to.
- `destination_name` (String) An optional name which Ambar will use when pushing record sequences to your Destination. Used to identify which Destination is
- `filter_ids` (List of String) A List of Ambar resource ids belonging to Ambar Filter resources which should be used with this DataDestination. These control what DataSources and applied filters will be delivered to your destination. Note that a DataSource can only be used once per DataDestination.

### Read-Only
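For context on the `filter_ids` attribute documented above, a minimal wiring sketch follows. The `ambar_filter` arguments and the `.resource_id` attribute reference are assumptions, not shown in this diff; consult the provider docs for the actual schema.

```terraform
# Hedged sketch: wiring a Filter into a DataDestination via filter_ids.
# The ambar_filter body and the .resource_id reference are assumptions.
resource "ambar_filter" "example_filter" {
  # ... filter configuration (source, filter contents, etc.) ...
}

resource "ambar_data_destination" "example_destination" {
  filter_ids = [
    ambar_filter.example_filter.resource_id, # assumed id attribute
  ]
  description          = "My Terraform DataDestination"
  destination_endpoint = "https://1.2.3.4.com/data"
  username             = "username"
  password             = "password"
}
```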
44 changes: 33 additions & 11 deletions docs/resources/data_source.md
@@ -14,23 +14,49 @@ Ambar DataSource resource. Represents the details needed for Ambar to establish

```terraform
resource "ambar_data_source" "example_data_source" {
data_source_type = "postgres"
description = "My Terraform DataSource"
partitioning_column = "partition"
serial_column = "serial"
username = "username"
password = "password"
data_source_type = "postgres"
description = "My Terraform Postgres DataSource"
# data_source_config key-values depend on the type of DataSource being created.
# See Ambar docs for more details.
data_source_config = {
"hostname" : "host",
"hostPort" : "5432",
"username" : "username",
"password" : "password"
"databaseName" : "postgres",
"tableName" : "events",
"publicationName" : "example_pub",
"partitioningColumn" : "partition",
"serialColumn" : "serial",
# columns should include all columns to be read from the database
# including the partition and serial columns
"columns" : "partition,serial,some,other,column"
"columns" : "partition,serial,some,other,column",
# tls termination override is optional
"tlsTerminationOverrideHost" : "tls.termination.host"
}
}
resource "ambar_data_source" "example_mysql_data_source" {
data_source_type = "mysql"
description = "My Terraform MySQL DataSource"
# data_source_config key-values depend on the type of DataSource being created.
# See Ambar docs for more details.
data_source_config = {
"hostname" : "host",
"hostPort" : "5432",
"username" : "username",
"password" : "password"
"databaseName" : "postgres",
"tableName" : "events",
"publicationName" : "example_pub",
"partitioningColumn" : "partition",
"incrementingColumn" : "incrementing",
# columns should include all columns to be read from the database
# including the partition and incrementing columns
"columns" : "partition,incrementing,some,other,column",
"binLogReplicationServerId" : 1001,
# tls termination override is optional
"tlsTerminationOverrideHost" : "tls.termination.host"
}
}
```
@@ -42,10 +68,6 @@ resource "ambar_data_source" "example_data_source" {

- `data_source_config` (Map of String) A Key Value map of further DataSource configurations specific to the type of database this DataSource will connect to. See Ambar documentation for a list of required parameters.
- `data_source_type` (String) The type of durable storage being connected to. This should be one of the supported database types by Ambar such as postgres. See Ambar documentation for a full list of supported data_source_types.
- `partitioning_column` (String) The name of the column which records in the database are partitioned on.
- `password` (String, Sensitive) A password credential which Ambar can use to communicate with your database storage.
- `serial_column` (String) The name of a column which increments with each write to the database.
- `username` (String, Sensitive) A username credential which Ambar can use to communicate with your database storage.

### Optional

1 change: 0 additions & 1 deletion examples/resources/ambar_data_destination/resource.tf
@@ -4,7 +4,6 @@ resource "ambar_data_destination" "example_destination" {
]
description = "My Terraform DataDestination"
destination_endpoint = "https://1.2.3.4.com/data"
destination_name = "ambar-dest"
username = "username"
password = "password"
}
40 changes: 33 additions & 7 deletions examples/resources/ambar_data_source/resource.tf
@@ -1,20 +1,46 @@
resource "ambar_data_source" "example_data_source" {
data_source_type = "postgres"
description = "My Terraform DataSource"
partitioning_column = "partition"
serial_column = "serial"
username = "username"
password = "password"
data_source_type = "postgres"
description = "My Terraform Postgres DataSource"
# data_source_config key-values depend on the type of DataSource being created.
# See Ambar docs for more details.
data_source_config = {
"hostname" : "host",
"hostPort" : "5432",
"username" : "username",
"password" : "password"
"databaseName" : "postgres",
"tableName" : "events",
"publicationName" : "example_pub",
"partitioningColumn" : "partition",
"serialColumn" : "serial",
# columns should include all columns to be read from the database
# including the partition and serial columns
"columns" : "partition,serial,some,other,column"
"columns" : "partition,serial,some,other,column",
# tls termination override is optional
"tlsTerminationOverrideHost" : "tls.termination.host"
}
}

resource "ambar_data_source" "example_mysql_data_source" {
data_source_type = "mysql"
description = "My Terraform MySQL DataSource"
# data_source_config key-values depend on the type of DataSource being created.
# See Ambar docs for more details.
data_source_config = {
"hostname" : "host",
"hostPort" : "5432",
"username" : "username",
"password" : "password"
"databaseName" : "postgres",
"tableName" : "events",
"publicationName" : "example_pub",
"partitioningColumn" : "partition",
"incrementingColumn" : "incrementing",
# columns should include all columns to be read from the database
# including the partition and incrementing columns
"columns" : "partition,incrementing,some,other,column",
"binLogReplicationServerId" : 1001,
# tls termination override is optional
"tlsTerminationOverrideHost" : "tls.termination.host"
}
}
14 changes: 7 additions & 7 deletions go.mod
@@ -3,7 +3,7 @@ module terraform-provider-ambar
go 1.20

require (
github.com/ambarltd/ambar_go_client v0.0.0-20240122101839-da73b5df3b3a
github.com/ambarltd/ambar_go_client v0.0.0-20240205224211-ad1560f6ac69
github.com/hashicorp/terraform-plugin-docs v0.18.0
github.com/hashicorp/terraform-plugin-framework v1.5.0
github.com/hashicorp/terraform-plugin-go v0.21.0
@@ -31,7 +31,7 @@ require (
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 // indirect
github.com/hashicorp/go-hclog v1.5.0 // indirect
github.com/hashicorp/go-hclog v1.6.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-plugin v1.6.0 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
@@ -55,7 +55,7 @@
github.com/mitchellh/go-wordwrap v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/oklog/run v1.0.0 // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/posener/complete v1.2.3 // indirect
github.com/russross/blackfriday v1.6.0 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
@@ -66,14 +66,14 @@
github.com/yuin/goldmark v1.6.0 // indirect
github.com/yuin/goldmark-meta v1.1.0 // indirect
github.com/zclconf/go-cty v1.14.1 // indirect
golang.org/x/crypto v0.17.0 // indirect
golang.org/x/crypto v0.18.0 // indirect
golang.org/x/exp v0.0.0-20230809150735-7b3493d9a819 // indirect
golang.org/x/mod v0.14.0 // indirect
golang.org/x/net v0.19.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/net v0.20.0 // indirect
golang.org/x/sys v0.16.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240205150955-31a09d347014 // indirect
google.golang.org/grpc v1.61.0 // indirect
google.golang.org/protobuf v1.32.0 // indirect
gopkg.in/yaml.v2 v2.3.0 // indirect
