diff --git a/CHANGELOG.md b/CHANGELOG.md
index 01a0e46a..ac473da9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,11 +1,16 @@
 # Change Log
-
+
+## [v1.11.134](https://github.com/inspec/inspec-gcp/tree/v1.11.134) (2024-10-16)
+
+#### Merged Pull Requests
+- CHEF-12247-V0-MAGIC-MODULE-dataproc_v1-Batch - Resource Implementation [#655](https://github.com/inspec/inspec-gcp/pull/655) ([sa-progress](https://github.com/sa-progress))
+
+
 ## [v1.11.133](https://github.com/inspec/inspec-gcp/tree/v1.11.133) (2024-10-10)
 
 #### Merged Pull Requests
 - CHEF-12479 Automatically generated by magic modules for service: run_v2 and reso… [#651](https://github.com/inspec/inspec-gcp/pull/651) ([sa-progress](https://github.com/sa-progress))
-
 ## [v1.11.132](https://github.com/inspec/inspec-gcp/tree/v1.11.132) (2024-10-10)
diff --git a/README.md b/README.md
index 43fd18c3..59a0bab6 100644
--- a/README.md
+++ b/README.md
@@ -295,6 +295,7 @@ The following resources are available in the InSpec GCP Profile
 | [google_data_fusion_instance](docs/resources/google_data_fusion_instance.md) | [google_data_fusion_instances](docs/resources/google_data_fusion_instances.md) |
 | [google_dataflow_project_location_job](docs/resources/google_dataflow_project_location_job.md) | [google_dataflow_project_location_jobs](docs/resources/google_dataflow_project_location_jobs.md) |
 | [google_dataproc_autoscaling_policy](docs/resources/google_dataproc_autoscaling_policy.md) | [google_dataproc_autoscaling_policies](docs/resources/google_dataproc_autoscaling_policies.md) |
+| [google_dataproc_batch](docs/resources/google_dataproc_batch.md) | [google_dataproc_batches](docs/resources/google_dataproc_batches.md) |
 | [google_dataproc_cluster](docs/resources/google_dataproc_cluster.md) | [google_dataproc_clusters](docs/resources/google_dataproc_clusters.md) |
 | [google_dataproc_job](docs/resources/google_dataproc_job.md) | [google_dataproc_jobs](docs/resources/google_dataproc_jobs.md) |
 | [google_dataproc_metastore_federation](docs/resources/google_dataproc_metastore_federation.md) | [google_dataproc_metastore_federations](docs/resources/google_dataproc_metastore_federations.md) |
diff --git a/VERSION b/VERSION
index 5ff11acf..ecbabbc0 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.11.133
\ No newline at end of file
+1.11.134
\ No newline at end of file
diff --git a/docs/resources/google_dataproc_batch.md b/docs/resources/google_dataproc_batch.md
new file mode 100644
index 00000000..757f3681
--- /dev/null
+++ b/docs/resources/google_dataproc_batch.md
@@ -0,0 +1,225 @@
++++
+
+title = "google_dataproc_batch Resource"
+platform = "gcp"
+draft = false
+gh_repo = "inspec-gcp"
+
+
+[menu.inspec]
+
+title = "google_dataproc_batch"
+identifier = "inspec/resources/gcp/google_dataproc_batch Resource"
+parent = "inspec/resources/gcp"
++++
+
+Use the `google_dataproc_batch` InSpec audit resource to test the properties of a Google Dataproc Batch.
+
+## Installation
+{{% inspec_gcp_install %}}
+
+## Syntax
+A `google_dataproc_batch` resource block declares the tests for a single Google Dataproc Batch, identified by `name`.
+
+## Examples
+```
+describe google_dataproc_batch(name: 'projects/*/locations/*/batches/value_name') do
+  it { should exist }
+  its('name') { should cmp 'value_name' }
+  its('uuid') { should cmp 'value_uuid' }
+  its('create_time') { should cmp 'value_createtime' }
+  its('state') { should cmp 'value_state' }
+  its('state_message') { should cmp 'value_statemessage' }
+  its('state_time') { should cmp 'value_statetime' }
+  its('creator') { should cmp 'value_creator' }
+  its('operation') { should cmp 'value_operation' }
+end
+
+describe google_dataproc_batch(name: "does_not_exist") do
+  it { should_not exist }
+end
+```
+
+## Parameters
+Parameters that can be passed to the `google_dataproc_batch` resource:
+
+  * `name`: The full resource name of the batch, in the form `projects/{project}/locations/{location}/batches/{batch_id}`.
+
+## Properties
+Properties that can be accessed from the `google_dataproc_batch` resource:
+
+  * `name`: Output only. The resource name of the batch.
+
+  * `uuid`: Output only. A batch UUID (Unique Universal Identifier). The service generates this value when it creates the batch.
+
+  * `create_time`: Output only. The time when the batch was created.
+
+  * `pyspark_batch`: A configuration for running an Apache PySpark (https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html) batch workload.
+
+    * `main_python_file_uri`: Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
+
+    * `args`: Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
+
+    * `python_file_uris`: Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
+
+    * `jar_file_uris`: Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+
+    * `file_uris`: Optional. HCFS URIs of files to be placed in the working directory of each executor.
+
+    * `archive_uris`: Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+
+  * `spark_batch`: A configuration for running an Apache Spark (https://spark.apache.org/) batch workload.
+
+    * `main_jar_file_uri`: Optional. The HCFS URI of the jar file that contains the main class.
+
+    * `main_class`: Optional. The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
+
+    * `args`: Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
+
+    * `jar_file_uris`: Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
+
+    * `file_uris`: Optional. HCFS URIs of files to be placed in the working directory of each executor.
+
+    * `archive_uris`: Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+
+  * `spark_r_batch`: A configuration for running an Apache SparkR (https://spark.apache.org/docs/latest/sparkr.html) batch workload.
+
+    * `main_r_file_uri`: Required. The HCFS URI of the main R file to use as the driver. Must be a .R or .r file.
+
+    * `args`: Optional. The arguments to pass to the Spark driver. Do not include arguments that can be set as batch properties, such as --conf, since a collision can occur that causes an incorrect batch submission.
+
+    * `file_uris`: Optional. HCFS URIs of files to be placed in the working directory of each executor.
+
+    * `archive_uris`: Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
+
+  * `spark_sql_batch`: A configuration for running Apache Spark SQL (https://spark.apache.org/sql/) queries as a batch workload.
+
+    * `query_file_uri`: Required. The HCFS URI of the script that contains Spark SQL queries to execute.
+
+    * `query_variables`: Optional. Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
+
+      * `additional_properties`:
+
+    * `jar_file_uris`: Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
+
+  * `runtime_info`: Runtime information about workload execution.
+
+    * `endpoints`: Output only. Map of remote access endpoints (such as web interfaces and APIs) to their URIs.
+
+      * `additional_properties`:
+
+    * `output_uri`: Output only. A URI pointing to the location of the stdout and stderr of the workload.
+
+    * `diagnostic_output_uri`: Output only. A URI pointing to the location of the diagnostics tarball.
+
+    * `approximate_usage`: Usage metrics represent approximate total resources consumed by a workload.
+
+      * `milli_dcu_seconds`: Optional. DCU (Dataproc Compute Units) usage in (milliDCU x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `shuffle_storage_gb_seconds`: Optional. Shuffle storage usage in (GB x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `milli_accelerator_seconds`: Optional. Accelerator usage in (milliAccelerator x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `accelerator_type`: Optional. The accelerator type being used, if any.
+
+    * `current_usage`: The usage snapshot represents the resources consumed by a workload at a specified time.
+
+      * `milli_dcu`: Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `shuffle_storage_gb`: Optional. Shuffle storage in gigabytes (GB) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `milli_dcu_premium`: Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) charged at premium tier (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `shuffle_storage_gb_premium`: Optional. Shuffle storage in gigabytes (GB) charged at premium tier (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `milli_accelerator`: Optional. Milli (one-thousandth) accelerator (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
+
+      * `accelerator_type`: Optional. The accelerator type being used, if any.
+
+      * `snapshot_time`: Optional. The timestamp of the usage snapshot.
+
+  * `state`: Output only. The state of the batch.
+  Possible values:
+    * STATE_UNSPECIFIED
+    * PENDING
+    * RUNNING
+    * CANCELLING
+    * CANCELLED
+    * SUCCEEDED
+    * FAILED
+
+  * `state_message`: Output only. Batch state details, such as a failure description if the state is FAILED.
+
+  * `state_time`: Output only. The time when the batch entered the current state.
+
+  * `creator`: Output only. The email address of the user who created the batch.
+
+  * `labels`: Optional. The labels to associate with this batch. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a batch.
+
+    * `additional_properties`:
+
+  * `runtime_config`: Runtime configuration for a workload.
+
+    * `version`: Optional. Version of the batch runtime.
+
+    * `container_image`: Optional. A custom container image for the job runtime environment. If not specified, a default container image is used.
+
+    * `properties`: Optional. A mapping of property names to values, which are used to configure workload execution.
+
+      * `additional_properties`:
+
+    * `repository_config`: Configuration for dependency repositories.
+
+      * `pypi_repository_config`: Configuration for the PyPI repository.
+
+        * `pypi_repository`: Optional. The PyPI repository address.
+
+  * `environment_config`: Environment configuration for a workload.
+
+    * `execution_config`: Execution configuration for a workload.
+
+      * `service_account`: Optional. The service account used to execute the workload.
+
+      * `network_uri`: Optional. Network URI to connect the workload to.
+
+      * `subnetwork_uri`: Optional. Subnetwork URI to connect the workload to.
+
+      * `network_tags`: Optional. Tags used for network traffic control.
+
+      * `kms_key`: Optional. The Cloud KMS key to use for encryption.
+
+      * `idle_ttl`: Optional. Applies to sessions only. The duration to keep the session alive while it's idling. Exceeding this threshold causes the session to terminate. This field cannot be set on a batch workload. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). Defaults to 1 hour if not set. If both ttl and idle_ttl are specified for an interactive session, the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
+
+      * `ttl`: Optional. The duration after which the workload will be terminated, specified as the JSON representation for Duration (https://protobuf.dev/programming-guides/proto3/#json). When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idle_ttl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
+
+      * `staging_bucket`: Optional. A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
+
+    * `peripherals_config`: Auxiliary services configuration for a workload.
+
+      * `metastore_service`: Optional. Resource name of an existing Dataproc Metastore service. Example: projects/[project_id]/locations/[region]/services/[service_id]
+
+      * `spark_history_server_config`: Spark History Server configuration for the workload.
+
+        * `dataproc_cluster`: Optional. Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload. Example: projects/[project_id]/regions/[region]/clusters/[cluster_name]
+
+  * `operation`: Output only. The resource name of the operation associated with this batch.
+
+  * `state_history`: Output only. Historical state information for the batch.
+
+    * `state`: Output only. The state of the batch at this point in history.
+    Possible values:
+      * STATE_UNSPECIFIED
+      * PENDING
+      * RUNNING
+      * CANCELLING
+      * CANCELLED
+      * SUCCEEDED
+      * FAILED
+
+    * `state_message`: Output only. Details about the state at this point in history.
+
+    * `state_start_time`: Output only. The time when the batch entered the historical state.
+
+
+## GCP Permissions
+
+Ensure the [Cloud Dataproc API](https://console.cloud.google.com/apis/library/dataproc.googleapis.com) is enabled for the current project.
diff --git a/docs/resources/google_dataproc_batches.md b/docs/resources/google_dataproc_batches.md
new file mode 100644
index 00000000..e0de208c
--- /dev/null
+++ b/docs/resources/google_dataproc_batches.md
@@ -0,0 +1,88 @@
++++
+
+title = "google_dataproc_batches Resource"
+platform = "gcp"
+draft = false
+gh_repo = "inspec-gcp"
+
+
+[menu.inspec]
+
+title = "google_dataproc_batches"
+identifier = "inspec/resources/gcp/google_dataproc_batches Resource"
+parent = "inspec/resources/gcp"
++++
+
+Use the `google_dataproc_batches` InSpec audit resource to test the properties of Google Dataproc Batches in a project and location.
+
+## Installation
+{{% inspec_gcp_install %}}
+
+## Syntax
+A `google_dataproc_batches` resource block collects the Google Dataproc Batches under a parent location, so that the group can be tested as a whole.
+
+## Examples
+```
+describe google_dataproc_batches(parent: 'projects/*/locations/*') do
+  it { should exist }
+  its('names') { should include 'value_name' }
+  its('uuids') { should include 'value_uuid' }
+  its('create_times') { should include 'value_createtime' }
+  its('states') { should include 'value_state' }
+  its('state_messages') { should include 'value_statemessage' }
+  its('state_times') { should include 'value_statetime' }
+  its('creators') { should include 'value_creator' }
+  its('operations') { should include 'value_operation' }
+end
+```
+
+## Parameters
+Parameters that can be passed to the `google_dataproc_batches` resource:
+
+* `parent`: The parent location that owns the collection of batches, in the form `projects/{project}/locations/{location}`.
+
+## Properties
+Properties that can be accessed from the `google_dataproc_batches` resource:
+
+See [google_dataproc_batch.md](google_dataproc_batch.md) for more detailed information.
+* `names`: an array of `google_dataproc_batch` name
+* `uuids`: an array of `google_dataproc_batch` uuid
+* `create_times`: an array of `google_dataproc_batch` create_time
+* `pyspark_batches`: an array of `google_dataproc_batch` pyspark_batch
+* `spark_batches`: an array of `google_dataproc_batch` spark_batch
+* `spark_r_batches`: an array of `google_dataproc_batch` spark_r_batch
+* `spark_sql_batches`: an array of `google_dataproc_batch` spark_sql_batch
+* `runtime_infos`: an array of `google_dataproc_batch` runtime_info
+* `states`: an array of `google_dataproc_batch` state
+* `state_messages`: an array of `google_dataproc_batch` state_message
+* `state_times`: an array of `google_dataproc_batch` state_time
+* `creators`: an array of `google_dataproc_batch` creator
+* `labels`: an array of `google_dataproc_batch` labels
+* `runtime_configs`: an array of `google_dataproc_batch` runtime_config
+* `environment_configs`: an array of `google_dataproc_batch` environment_config
+* `operations`: an array of `google_dataproc_batch` operation
+* `state_histories`: an array of `google_dataproc_batch` state_history
+
+## Filter Criteria
+This resource supports all of the above properties as filter criteria, which can be used
+with `where` as a block or a method.
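+
+For example, a minimal sketch that filters on `state` (the parent path and the
+expected state below are placeholder values; substitute your own):
+
+```
+describe google_dataproc_batches(parent: 'projects/my-project/locations/us-central1').where(state: 'FAILED') do
+  it { should_not exist }
+end
+```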
+ +## GCP Permissions + +Ensure the [Cloud Dataproc API](https://console.cloud.google.com/apis/library/dataproc.googleapis.com) is enabled for the current project. diff --git a/inspec.yml b/inspec.yml index 19b85d1b..d2fb8d2f 100644 --- a/inspec.yml +++ b/inspec.yml @@ -4,7 +4,7 @@ maintainer: spaterson@chef.io,russell.seymour@turtlesystems.co.uk summary: This resource pack provides compliance resources_old_ignore for Google Cloud Platform copyright: spaterson@chef.io,russell.seymour@turtlesystems.co.uk copyright_email: spaterson@chef.io,russell.seymour@turtlesystems.co.uk -version: 1.11.133 +version: 1.11.134 license: Apache-2.0 inspec_version: '>= 4.7.3' supports: diff --git a/libraries/google/dataproc/property/batch_environment_config.rb b/libraries/google/dataproc/property/batch_environment_config.rb new file mode 100644 index 00000000..e8c14a5b --- /dev/null +++ b/libraries/google/dataproc/property/batch_environment_config.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_environment_config_execution_config' +require 'google/dataproc/property/batch_environment_config_peripherals_config' +require 'google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config' +module GoogleInSpec + module Dataproc + module Property + class BatchEnvironmentConfig + attr_reader :execution_config + + attr_reader :peripherals_config + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @execution_config = GoogleInSpec::Dataproc::Property::BatchEnvironmentConfigExecutionConfig.new(args['executionConfig'], to_s) + @peripherals_config = GoogleInSpec::Dataproc::Property::BatchEnvironmentConfigPeripheralsConfig.new(args['peripheralsConfig'], to_s) + end + + def to_s + "#{@parent_identifier} BatchEnvironmentConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_environment_config_execution_config.rb b/libraries/google/dataproc/property/batch_environment_config_execution_config.rb new file mode 100644 index 00000000..43c43f0d --- /dev/null +++ b/libraries/google/dataproc/property/batch_environment_config_execution_config.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. 
+# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchEnvironmentConfigExecutionConfig + attr_reader :service_account + + attr_reader :network_uri + + attr_reader :subnetwork_uri + + attr_reader :network_tags + + attr_reader :kms_key + + attr_reader :idle_ttl + + attr_reader :ttl + + attr_reader :staging_bucket + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @service_account = args['serviceAccount'] + @network_uri = args['networkUri'] + @subnetwork_uri = args['subnetworkUri'] + @network_tags = args['networkTags'] + @kms_key = args['kmsKey'] + @idle_ttl = args['idleTtl'] + @ttl = args['ttl'] + @staging_bucket = args['stagingBucket'] + end + + def to_s + "#{@parent_identifier} BatchEnvironmentConfigExecutionConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_environment_config_peripherals_config.rb b/libraries/google/dataproc/property/batch_environment_config_peripherals_config.rb new file mode 100644 index 00000000..e49262e3 --- /dev/null +++ b/libraries/google/dataproc/property/batch_environment_config_peripherals_config.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config' +module GoogleInSpec + module Dataproc + module Property + class BatchEnvironmentConfigPeripheralsConfig + attr_reader :metastore_service + + attr_reader :spark_history_server_config + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @metastore_service = args['metastoreService'] + @spark_history_server_config = GoogleInSpec::Dataproc::Property::BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig.new(args['sparkHistoryServerConfig'], to_s) + end + + def to_s + "#{@parent_identifier} BatchEnvironmentConfigPeripheralsConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config.rb b/libraries/google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config.rb new file mode 100644 index 00000000..cd775a6f --- /dev/null +++ b/libraries/google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. 
+# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig + attr_reader :dataproc_cluster + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @dataproc_cluster = args['dataprocCluster'] + end + + def to_s + "#{@parent_identifier} BatchEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_labels.rb b/libraries/google/dataproc/property/batch_labels.rb new file mode 100644 index 00000000..a0f4a17e --- /dev/null +++ b/libraries/google/dataproc/property/batch_labels.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchLabels + attr_reader :additional_properties + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @additional_properties = args['additionalProperties'] + end + + def to_s + "#{@parent_identifier} BatchLabels" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_pyspark_batch.rb b/libraries/google/dataproc/property/batch_pyspark_batch.rb new file mode 100644 index 00000000..9299c233 --- /dev/null +++ b/libraries/google/dataproc/property/batch_pyspark_batch.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchPysparkBatch + attr_reader :main_python_file_uri + + attr_reader :args + + attr_reader :python_file_uris + + attr_reader :jar_file_uris + + attr_reader :file_uris + + attr_reader :archive_uris + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @main_python_file_uri = args['mainPythonFileUri'] + @args = args['args'] + @python_file_uris = args['pythonFileUris'] + @jar_file_uris = args['jarFileUris'] + @file_uris = args['fileUris'] + @archive_uris = args['archiveUris'] + end + + def to_s + "#{@parent_identifier} BatchPysparkBatch" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_config.rb b/libraries/google/dataproc/property/batch_runtime_config.rb new file mode 100644 index 00000000..ae8fed26 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_config.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_runtime_config_properties' +require 'google/dataproc/property/batch_runtime_config_repository_config' +require 'google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config' +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeConfig + attr_reader :version + + attr_reader :container_image + + attr_reader :properties + + attr_reader :repository_config + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @version = args['version'] + @container_image = args['containerImage'] + @properties = GoogleInSpec::Dataproc::Property::BatchRuntimeConfigProperties.new(args['properties'], to_s) + @repository_config = GoogleInSpec::Dataproc::Property::BatchRuntimeConfigRepositoryConfig.new(args['repositoryConfig'], to_s) + end + + def to_s + "#{@parent_identifier} BatchRuntimeConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_config_properties.rb b/libraries/google/dataproc/property/batch_runtime_config_properties.rb new file mode 100644 index 00000000..1d2888a2 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_config_properties.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeConfigProperties + attr_reader :additional_properties + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @additional_properties = args['additionalProperties'] + end + + def to_s + "#{@parent_identifier} BatchRuntimeConfigProperties" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_config_repository_config.rb b/libraries/google/dataproc/property/batch_runtime_config_repository_config.rb new file mode 100644 index 00000000..86a413f4 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_config_repository_config.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config' +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeConfigRepositoryConfig + attr_reader :pypi_repository_config + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @pypi_repository_config = GoogleInSpec::Dataproc::Property::BatchRuntimeConfigRepositoryConfigPypiRepositoryConfig.new(args['pypiRepositoryConfig'], to_s) + end + + def to_s + "#{@parent_identifier} BatchRuntimeConfigRepositoryConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config.rb b/libraries/google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config.rb new file mode 100644 index 00000000..eb49c0b9 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeConfigRepositoryConfigPypiRepositoryConfig + attr_reader :pypi_repository + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @pypi_repository = args['pypiRepository'] + end + + def to_s + "#{@parent_identifier} BatchRuntimeConfigRepositoryConfigPypiRepositoryConfig" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_info.rb b/libraries/google/dataproc/property/batch_runtime_info.rb new file mode 100644 index 00000000..8edaa0a4 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_info.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_runtime_info_approximate_usage' +require 'google/dataproc/property/batch_runtime_info_current_usage' +require 'google/dataproc/property/batch_runtime_info_endpoints' +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeInfo + attr_reader :endpoints + + attr_reader :output_uri + + attr_reader :diagnostic_output_uri + + attr_reader :approximate_usage + + attr_reader :current_usage + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @endpoints = GoogleInSpec::Dataproc::Property::BatchRuntimeInfoEndpoints.new(args['endpoints'], to_s) + @output_uri = args['outputUri'] + @diagnostic_output_uri = args['diagnosticOutputUri'] + @approximate_usage = GoogleInSpec::Dataproc::Property::BatchRuntimeInfoApproximateUsage.new(args['approximateUsage'], to_s) + @current_usage = GoogleInSpec::Dataproc::Property::BatchRuntimeInfoCurrentUsage.new(args['currentUsage'], to_s) + end + + def to_s + "#{@parent_identifier} BatchRuntimeInfo" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_info_approximate_usage.rb b/libraries/google/dataproc/property/batch_runtime_info_approximate_usage.rb new file mode 100644 index 00000000..ea826287 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_info_approximate_usage.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeInfoApproximateUsage + attr_reader :milli_dcu_seconds + + attr_reader :shuffle_storage_gb_seconds + + attr_reader :milli_accelerator_seconds + + attr_reader :accelerator_type + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @milli_dcu_seconds = args['milliDcuSeconds'] + @shuffle_storage_gb_seconds = args['shuffleStorageGbSeconds'] + @milli_accelerator_seconds = args['milliAcceleratorSeconds'] + @accelerator_type = args['acceleratorType'] + end + + def to_s + "#{@parent_identifier} BatchRuntimeInfoApproximateUsage" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_info_current_usage.rb b/libraries/google/dataproc/property/batch_runtime_info_current_usage.rb new file mode 100644 index 00000000..6cd0bc82 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_info_current_usage.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeInfoCurrentUsage + attr_reader :milli_dcu + + attr_reader :shuffle_storage_gb + + attr_reader :milli_dcu_premium + + attr_reader :shuffle_storage_gb_premium + + attr_reader :milli_accelerator + + attr_reader :accelerator_type + + attr_reader :snapshot_time + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @milli_dcu = args['milliDcu'] + @shuffle_storage_gb = args['shuffleStorageGb'] + @milli_dcu_premium = args['milliDcuPremium'] + @shuffle_storage_gb_premium = args['shuffleStorageGbPremium'] + @milli_accelerator = args['milliAccelerator'] + @accelerator_type = args['acceleratorType'] + @snapshot_time = args['snapshotTime'] + end + + def to_s + "#{@parent_identifier} BatchRuntimeInfoCurrentUsage" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_runtime_info_endpoints.rb b/libraries/google/dataproc/property/batch_runtime_info_endpoints.rb new file mode 100644 index 00000000..a6a62cc2 --- /dev/null +++ b/libraries/google/dataproc/property/batch_runtime_info_endpoints.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchRuntimeInfoEndpoints + attr_reader :additional_properties + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @additional_properties = args['additionalProperties'] + end + + def to_s + "#{@parent_identifier} BatchRuntimeInfoEndpoints" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_spark_batch.rb b/libraries/google/dataproc/property/batch_spark_batch.rb new file mode 100644 index 00000000..60dbad68 --- /dev/null +++ b/libraries/google/dataproc/property/batch_spark_batch.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchSparkBatch + attr_reader :main_jar_file_uri + + attr_reader :main_class + + attr_reader :args + + attr_reader :jar_file_uris + + attr_reader :file_uris + + attr_reader :archive_uris + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @main_jar_file_uri = args['mainJarFileUri'] + @main_class = args['mainClass'] + @args = args['args'] + @jar_file_uris = args['jarFileUris'] + @file_uris = args['fileUris'] + @archive_uris = args['archiveUris'] + end + + def to_s + "#{@parent_identifier} BatchSparkBatch" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_spark_r_batch.rb b/libraries/google/dataproc/property/batch_spark_r_batch.rb new file mode 100644 index 00000000..11a51055 --- /dev/null +++ b/libraries/google/dataproc/property/batch_spark_r_batch.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchSparkRBatch + attr_reader :main_r_file_uri + + attr_reader :args + + attr_reader :file_uris + + attr_reader :archive_uris + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @main_r_file_uri = args['mainRFileUri'] + @args = args['args'] + @file_uris = args['fileUris'] + @archive_uris = args['archiveUris'] + end + + def to_s + "#{@parent_identifier} BatchSparkRBatch" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_spark_sql_batch.rb b/libraries/google/dataproc/property/batch_spark_sql_batch.rb new file mode 100644 index 00000000..e0d18955 --- /dev/null +++ b/libraries/google/dataproc/property/batch_spark_sql_batch.rb @@ -0,0 +1,41 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'google/dataproc/property/batch_spark_sql_batch_query_variables' +module GoogleInSpec + module Dataproc + module Property + class BatchSparkSqlBatch + attr_reader :query_file_uri + + attr_reader :query_variables + + attr_reader :jar_file_uris + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @query_file_uri = args['queryFileUri'] + @query_variables = GoogleInSpec::Dataproc::Property::BatchSparkSqlBatchQueryVariables.new(args['queryVariables'], to_s) + @jar_file_uris = args['jarFileUris'] + end + + def to_s + "#{@parent_identifier} BatchSparkSqlBatch" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_spark_sql_batch_query_variables.rb b/libraries/google/dataproc/property/batch_spark_sql_batch_query_variables.rb new file mode 100644 index 00000000..ca7d6878 --- /dev/null +++ b/libraries/google/dataproc/property/batch_spark_sql_batch_query_variables.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchSparkSqlBatchQueryVariables + attr_reader :additional_properties + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? 
+ @parent_identifier = parent_identifier + @additional_properties = args['additionalProperties'] + end + + def to_s + "#{@parent_identifier} BatchSparkSqlBatchQueryVariables" + end + end + end + end +end diff --git a/libraries/google/dataproc/property/batch_state_history.rb b/libraries/google/dataproc/property/batch_state_history.rb new file mode 100644 index 00000000..15e71c79 --- /dev/null +++ b/libraries/google/dataproc/property/batch_state_history.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +module GoogleInSpec + module Dataproc + module Property + class BatchStateHistory + attr_reader :state + + attr_reader :state_message + + attr_reader :state_start_time + + def initialize(args = nil, parent_identifier = nil) + return if args.nil? + @parent_identifier = parent_identifier + @state = args['state'] + @state_message = args['stateMessage'] + @state_start_time = args['stateStartTime'] + end + + def to_s + "#{@parent_identifier} BatchStateHistory" + end + end + + class BatchStateHistoryArray + def self.parse(value, parent_identifier) + return if value.nil? + return BatchStateHistory.new(value, parent_identifier) unless value.is_a?(::Array) + value.map { |v| BatchStateHistory.new(v, parent_identifier) } + end + end + end + end +end diff --git a/libraries/google_dataproc_batch.rb b/libraries/google_dataproc_batch.rb new file mode 100644 index 00000000..83f712ee --- /dev/null +++ b/libraries/google_dataproc_batch.rb @@ -0,0 +1,106 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. 
+# +# ---------------------------------------------------------------------------- +require 'gcp_backend' +require 'google/dataproc/property/batch_environment_config' +require 'google/dataproc/property/batch_environment_config_execution_config' +require 'google/dataproc/property/batch_environment_config_peripherals_config' +require 'google/dataproc/property/batch_environment_config_peripherals_config_spark_history_server_config' +require 'google/dataproc/property/batch_labels' +require 'google/dataproc/property/batch_pyspark_batch' +require 'google/dataproc/property/batch_runtime_config' +require 'google/dataproc/property/batch_runtime_config_properties' +require 'google/dataproc/property/batch_runtime_config_repository_config' +require 'google/dataproc/property/batch_runtime_config_repository_config_pypi_repository_config' +require 'google/dataproc/property/batch_runtime_info' +require 'google/dataproc/property/batch_runtime_info_approximate_usage' +require 'google/dataproc/property/batch_runtime_info_current_usage' +require 'google/dataproc/property/batch_runtime_info_endpoints' +require 'google/dataproc/property/batch_spark_batch' +require 'google/dataproc/property/batch_spark_r_batch' +require 'google/dataproc/property/batch_spark_sql_batch' +require 'google/dataproc/property/batch_spark_sql_batch_query_variables' +require 'google/dataproc/property/batch_state_history' + +# A provider to manage Dataproc resources. +class DataprocBatch < GcpResourceBase + name 'google_dataproc_batch' + desc 'Batch' + supports platform: 'gcp' + + attr_reader :params + attr_reader :name + attr_reader :uuid + attr_reader :create_time + attr_reader :pyspark_batch + attr_reader :spark_batch + attr_reader :spark_r_batch + attr_reader :spark_sql_batch + attr_reader :runtime_info + attr_reader :state + attr_reader :state_message + attr_reader :state_time + attr_reader :creator + attr_reader :labels + attr_reader :runtime_config + attr_reader :environment_config + attr_reader :operation + attr_reader :state_history + + def initialize(params) + super(params.merge({ use_http_transport: true })) + @params = params + @fetched = @connection.fetch(product_url(params[:beta]), resource_base_url, params, 'Get') + parse unless @fetched.nil? + end + + def parse + @name = @fetched['name'] + @uuid = @fetched['uuid'] + @create_time = @fetched['createTime'] + @pyspark_batch = GoogleInSpec::Dataproc::Property::BatchPysparkBatch.new(@fetched['pysparkBatch'], to_s) + @spark_batch = GoogleInSpec::Dataproc::Property::BatchSparkBatch.new(@fetched['sparkBatch'], to_s) + @spark_r_batch = GoogleInSpec::Dataproc::Property::BatchSparkRBatch.new(@fetched['sparkRBatch'], to_s) + @spark_sql_batch = GoogleInSpec::Dataproc::Property::BatchSparkSqlBatch.new(@fetched['sparkSqlBatch'], to_s) + @runtime_info = GoogleInSpec::Dataproc::Property::BatchRuntimeInfo.new(@fetched['runtimeInfo'], to_s) + @state = @fetched['state'] + @state_message = @fetched['stateMessage'] + @state_time = @fetched['stateTime'] + @creator = @fetched['creator'] + @labels = GoogleInSpec::Dataproc::Property::BatchLabels.new(@fetched['labels'], to_s) + @runtime_config = GoogleInSpec::Dataproc::Property::BatchRuntimeConfig.new(@fetched['runtimeConfig'], to_s) + @environment_config = GoogleInSpec::Dataproc::Property::BatchEnvironmentConfig.new(@fetched['environmentConfig'], to_s) + @operation = @fetched['operation'] + @state_history = GoogleInSpec::Dataproc::Property::BatchStateHistoryArray.parse(@fetched['stateHistory'], to_s) + end + + def exists? + !@fetched.nil? 
+ end + + def to_s + "Batch #{@params[:name]}" + end + + private + + def product_url(_ = nil) + 'https://dataproc.googleapis.com/v1/' + end + + def resource_base_url + '{{name}}' + end +end diff --git a/libraries/google_dataproc_batches.rb b/libraries/google_dataproc_batches.rb new file mode 100644 index 00000000..92731c94 --- /dev/null +++ b/libraries/google_dataproc_batches.rb @@ -0,0 +1,111 @@ +# frozen_string_literal: false + +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** Type: MMv1 *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in README.md and +# CONTRIBUTING.md located at the root of this package. +# +# ---------------------------------------------------------------------------- +require 'gcp_backend' +class DataprocBatchs < GcpResourceBase + name 'google_dataproc_batches' + desc 'Batch plural resource' + supports platform: 'gcp' + + attr_reader :table + + filter_table_config = FilterTable.create + + filter_table_config.add(:names, field: :name) + filter_table_config.add(:uuids, field: :uuid) + filter_table_config.add(:create_times, field: :create_time) + filter_table_config.add(:pyspark_batches, field: :pyspark_batch) + filter_table_config.add(:spark_batches, field: :spark_batch) + filter_table_config.add(:spark_r_batches, field: :spark_r_batch) + filter_table_config.add(:spark_sql_batches, field: :spark_sql_batch) + filter_table_config.add(:runtime_infos, field: :runtime_info) + filter_table_config.add(:states, field: :state) + filter_table_config.add(:state_messages, field: :state_message) + filter_table_config.add(:state_times, field: :state_time) + filter_table_config.add(:creators, field: :creator) + filter_table_config.add(:labels, field: :labels) + filter_table_config.add(:runtime_configs, field: :runtime_config) + filter_table_config.add(:environment_configs, field: :environment_config) + filter_table_config.add(:operations, field: :operation) + filter_table_config.add(:state_histories, field: :state_history) + + filter_table_config.connect(self, :table) + + def initialize(params = {}) + super(params.merge({ use_http_transport: true })) + @params = params + @table = fetch_wrapped_resource('batches') + end + + def fetch_wrapped_resource(wrap_path) + # fetch_resource returns an array of responses (to handle pagination) + result = @connection.fetch_all(product_url, resource_base_url, @params, 'Get') + return if result.nil? + + # Conversion of string -> object hash to symbol -> object hash that InSpec needs + converted = [] + result.each do |response| + next if response.nil? 
|| !response.key?(wrap_path) + response[wrap_path].each do |hash| + hash_with_symbols = {} + hash.each_key do |key| + name, value = transform(key, hash) + hash_with_symbols[name] = value + end + converted.push(hash_with_symbols) + end + end + + converted + end + + def transform(key, value) + return transformers[key].call(value) if transformers.key?(key) + + [key.to_sym, value] + end + + def transformers + { + 'name' => ->(obj) { [:name, obj['name']] }, + 'uuid' => ->(obj) { [:uuid, obj['uuid']] }, + 'createTime' => ->(obj) { [:create_time, obj['createTime']] }, + 'pysparkBatch' => ->(obj) { [:pyspark_batch, GoogleInSpec::Dataproc::Property::BatchPysparkBatch.new(obj['pysparkBatch'], to_s)] }, + 'sparkBatch' => ->(obj) { [:spark_batch, GoogleInSpec::Dataproc::Property::BatchSparkBatch.new(obj['sparkBatch'], to_s)] }, + 'sparkRBatch' => ->(obj) { [:spark_r_batch, GoogleInSpec::Dataproc::Property::BatchSparkRBatch.new(obj['sparkRBatch'], to_s)] }, + 'sparkSqlBatch' => ->(obj) { [:spark_sql_batch, GoogleInSpec::Dataproc::Property::BatchSparkSqlBatch.new(obj['sparkSqlBatch'], to_s)] }, + 'runtimeInfo' => ->(obj) { [:runtime_info, GoogleInSpec::Dataproc::Property::BatchRuntimeInfo.new(obj['runtimeInfo'], to_s)] }, + 'state' => ->(obj) { [:state, obj['state']] }, + 'stateMessage' => ->(obj) { [:state_message, obj['stateMessage']] }, + 'stateTime' => ->(obj) { [:state_time, obj['stateTime']] }, + 'creator' => ->(obj) { [:creator, obj['creator']] }, + 'labels' => ->(obj) { [:labels, GoogleInSpec::Dataproc::Property::BatchLabels.new(obj['labels'], to_s)] }, + 'runtimeConfig' => ->(obj) { [:runtime_config, GoogleInSpec::Dataproc::Property::BatchRuntimeConfig.new(obj['runtimeConfig'], to_s)] }, + 'environmentConfig' => ->(obj) { [:environment_config, GoogleInSpec::Dataproc::Property::BatchEnvironmentConfig.new(obj['environmentConfig'], to_s)] }, + 'operation' => ->(obj) { [:operation, obj['operation']] }, + 'stateHistory' => ->(obj) { [:state_history, GoogleInSpec::Dataproc::Property::BatchStateHistoryArray.parse(obj['stateHistory'], to_s)] }, + } + end + + private + + def product_url(_ = nil) + 'https://dataproc.googleapis.com/v1/' + end + + def resource_base_url + '{{parent}}/batches' + end +end diff --git a/test/integration/build/gcp-mm.tf b/test/integration/build/gcp-mm.tf index 2665a170..c91986be 100644 --- a/test/integration/build/gcp-mm.tf +++ b/test/integration/build/gcp-mm.tf @@ -269,6 +269,9 @@ variable "data_fusion_instance" { variable "cloud_run_jobs" { type = any } +variable "dataproc_serverless_batches" { + type = any +} variable "monitoring_group" { type = any } @@ -2248,6 +2251,30 @@ resource "google_cloud_run_v2_job" "default" { } } } +resource "google_dataproc_batch" "inspec_batch_spark" { + + batch_id = var.dataproc_serverless_batches.name + location = var.dataproc_serverless_batches.location + labels = {"app": "inspec"} + project = var.gcp_project_id + runtime_config { + properties = { "spark.dynamicAllocation.enabled": "false", "spark.executor.instances": "2" } + } + + environment_config { + execution_config { + subnetwork_uri = "default" + ttl = "3600s" + network_tags = ["tag1"] + } + } + + spark_batch { + main_class = var.dataproc_serverless_batches.main_class + args = [var.dataproc_serverless_batches.args] + jar_file_uris = [var.dataproc_serverless_batches.path] + } +} resource "google_monitoring_group" "inspec-test-group" { project = var.gcp_project_id display_name = var.monitoring_group.name diff --git a/test/integration/configuration/mm-attributes.yml 
diff --git a/test/integration/configuration/mm-attributes.yml b/test/integration/configuration/mm-attributes.yml
index 7c0ec298..e6ec72bb 100644
--- a/test/integration/configuration/mm-attributes.yml
+++ b/test/integration/configuration/mm-attributes.yml
@@ -752,6 +752,13 @@ cloud_run_jobs:
   deletion_protection: "false"
   image: "us-central1-docker.pkg.dev/ppradhan/nas/balasubs_tutorial1_20230915_182543:latest"
 
+dataproc_serverless_batches:
+  name: "inspec-test-batch-0052"
+  location: "us-central1"
+  main_class: "org.apache.spark.examples.SparkPi"
+  args: "10"
+  path: "file:///usr/lib/spark/examples/jars/spark-examples.jar"
+
 monitoring_group:
   name: "inspec-test-group"
   filter: "resource.metadata.name = has_substring(\"inspec\")"
diff --git a/test/integration/verify/controls/google_dataproc_batch.rb b/test/integration/verify/controls/google_dataproc_batch.rb
new file mode 100644
index 00000000..884a70d8
--- /dev/null
+++ b/test/integration/verify/controls/google_dataproc_batch.rb
@@ -0,0 +1,49 @@
+# ----------------------------------------------------------------------------
+#
+# *** AUTO GENERATED CODE *** Type: MMv1 ***
+#
+# ----------------------------------------------------------------------------
+#
+# This file is automatically generated by Magic Modules and manual
+# changes will be clobbered when the file is regenerated.
+#
+# Please read more about how to change this file in README.md and
+# CONTRIBUTING.md located at the root of this package.
+#
+# ----------------------------------------------------------------------------
+
+title 'Test GCP google_dataproc_batch resource.'
+
+gcp_project_id = input(:gcp_project_id, value: 'gcp_project_id', description: 'The GCP project identifier.')
+
+batch = input('batch', value: {
+  "name" => "projects/ppradhan/locations/us-central1/batches/inspec-test-batch-0052",
+  "parent" => "projects/ppradhan/locations/us-central1",
+  "uuid" => "5a1b8402-2aa5-4578-98ee-2ff12ff2a14e",
+  "create_time" => "2024-10-15T06:42:29.671473Z",
+  "state" => "SUCCEEDED",
+  "state_time" => "2024-10-15T06:44:55.114445Z",
+  "creator" => "bala-local@ppradhan.iam.gserviceaccount.com",
+  "operation" => "projects/ppradhan/regions/us-central1/operations/19a2ac29-3564-49b8-8116-c36dd98d9cd5"
+}, description: 'batch description')
+control 'google_dataproc_batch-1.0' do
+  impact 1.0
+  title 'google_dataproc_batch resource test'
+
+  describe google_dataproc_batch(name: batch['name']) do
+    it { should exist }
+    its('name') { should cmp batch['name'] }
+    its('uuid') { should cmp batch['uuid'] }
+    its('create_time') { should cmp batch['create_time'] }
+    its('state') { should cmp batch['state'] }
+    its('state_message') { should cmp batch['state_message'] }
+    its('state_time') { should cmp batch['state_time'] }
+    its('creator') { should cmp batch['creator'] }
+    its('operation') { should cmp batch['operation'] }
+
+  end
+
+  describe google_dataproc_batch(name: "does_not_exit") do
+    it { should_not exist }
+  end
+end
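Editor's note: the generated control above checks one known batch; the plural and singular resources can also be chained so that every batch under a parent is inspected. A sketch, with an illustrative parent path and a state list taken as an assumption from the documented Dataproc batch lifecycle:

```
# Iterate over every batch name returned by the plural resource and inspect
# each one with the singular resource. The state list below is an assumption
# based on the documented Dataproc batch states, not generated code.
google_dataproc_batches(parent: 'projects/my-project/locations/us-central1').names.each do |batch_name|
  describe google_dataproc_batch(name: batch_name) do
    it { should exist }
    its('state') { should be_in %w(PENDING RUNNING CANCELLING CANCELLED SUCCEEDED FAILED) }
  end
end
```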
diff --git a/test/integration/verify/controls/google_dataproc_batches.rb b/test/integration/verify/controls/google_dataproc_batches.rb
new file mode 100644
index 00000000..8210780b
--- /dev/null
+++ b/test/integration/verify/controls/google_dataproc_batches.rb
@@ -0,0 +1,36 @@
+# ----------------------------------------------------------------------------
+#
+# *** AUTO GENERATED CODE *** Type: MMv1 ***
+#
+# ----------------------------------------------------------------------------
+#
+# This file is automatically generated by Magic Modules and manual
+# changes will be clobbered when the file is regenerated.
+#
+# Please read more about how to change this file in README.md and
+# CONTRIBUTING.md located at the root of this package.
+#
+# ----------------------------------------------------------------------------
+
+title 'Test GCP google_dataproc_batches resource.'
+
+gcp_project_id = input(:gcp_project_id, value: 'gcp_project_id', description: 'The GCP project identifier.')
+
+batch = input('batch', value: {
+  "name" => "projects/ppradhan/locations/us-central1/batches/inspec-test-batch-0052",
+  "parent" => "projects/ppradhan/locations/us-central1",
+  "uuid" => "5a1b8402-2aa5-4578-98ee-2ff12ff2a14e",
+  "create_time" => "2024-10-15T06:42:29.671473Z",
+  "state" => "SUCCEEDED",
+  "state_time" => "2024-10-15T06:44:55.114445Z",
+  "creator" => "bala-local@ppradhan.iam.gserviceaccount.com",
+  "operation" => "projects/ppradhan/regions/us-central1/operations/19a2ac29-3564-49b8-8116-c36dd98d9cd5"
+}, description: 'batch description')
+control 'google_dataproc_batches-1.0' do
+  impact 1.0
+  title 'google_dataproc_batches resource test'
+
+  describe google_dataproc_batches(parent: batch['parent']) do
+    it { should exist }
+  end
+end
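Editor's note: the generated plural control only asserts existence. If stronger checks are wanted, the filter-table columns can be asserted directly. A hedged sketch reusing the `batch` input above; the SUCCEEDED expectation assumes the fixture batch has finished:

```
# Column names (names, states) come from the filter_table_config entries in
# libraries/google_dataproc_batches.rb; count is provided by FilterTable.
describe google_dataproc_batches(parent: batch['parent']) do
  its('count') { should be >= 1 }
  its('names') { should include batch['name'] }
  its('states') { should include 'SUCCEEDED' }
end
```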